repo (stringclasses, 358 distinct values) | pull_number (int64, 6 to 67.9k) | instance_id (stringlengths, 12 to 49) | issue_numbers (sequencelengths, 1 to 7) | base_commit (stringlengths, 40) | patch (stringlengths, 87 to 101M) | test_patch (stringlengths, 72 to 22.3M) | problem_statement (stringlengths, 3 to 256k) | hints_text (stringlengths, 0 to 545k) | created_at (stringlengths, 20) | PASS_TO_PASS (sequencelengths, 0) | FAIL_TO_PASS (sequencelengths, 0)
---|---|---|---|---|---|---|---|---|---|---|---|
getpelican/pelican | 3,215 | getpelican__pelican-3215 | ["3178"] | fab6e1a2c51317b1ff3523755cd82f7b3ea3d433 | diff --git a/pelican/tools/pelican_import.py b/pelican/tools/pelican_import.py
--- a/pelican/tools/pelican_import.py
+++ b/pelican/tools/pelican_import.py
@@ -390,22 +390,22 @@ def dc2fields(file):
post_format)
-def tumblr2fields(api_key, blogname):
- """ Imports Tumblr posts (API v2)"""
+def _get_tumblr_posts(api_key, blogname, offset=0):
import json
import urllib.request as urllib_request
+ url = ("https://api.tumblr.com/v2/blog/%s.tumblr.com/"
+ "posts?api_key=%s&offset=%d&filter=raw") % (
+ blogname, api_key, offset)
+ request = urllib_request.Request(url)
+ handle = urllib_request.urlopen(request)
+ posts = json.loads(handle.read().decode('utf-8'))
+ return posts.get('response').get('posts')
- def get_tumblr_posts(api_key, blogname, offset=0):
- url = ("https://api.tumblr.com/v2/blog/%s.tumblr.com/"
- "posts?api_key=%s&offset=%d&filter=raw") % (
- blogname, api_key, offset)
- request = urllib_request.Request(url)
- handle = urllib_request.urlopen(request)
- posts = json.loads(handle.read().decode('utf-8'))
- return posts.get('response').get('posts')
+def tumblr2fields(api_key, blogname):
+ """ Imports Tumblr posts (API v2)"""
offset = 0
- posts = get_tumblr_posts(api_key, blogname, offset)
+ posts = _get_tumblr_posts(api_key, blogname, offset)
subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
while len(posts) > 0:
for post in posts:
@@ -428,12 +428,10 @@ def get_tumblr_posts(api_key, blogname, offset=0):
fmtstr = '![%s](%s)'
else:
fmtstr = '<img alt="%s" src="%s" />'
- content = ''
- for photo in post.get('photos'):
- content += '\n'.join(
- fmtstr % (photo.get('caption'),
- photo.get('original_size').get('url')))
- content += '\n\n' + post.get('caption')
+ content = '\n'.join(
+ fmtstr % (photo.get('caption'),
+ photo.get('original_size').get('url'))
+ for photo in post.get('photos'))
elif type == 'quote':
if format == 'markdown':
fmtstr = '\n\n— %s'
@@ -483,7 +481,7 @@ def get_tumblr_posts(api_key, blogname, offset=0):
tags, status, kind, format)
offset += len(posts)
- posts = get_tumblr_posts(api_key, blogname, offset)
+ posts = _get_tumblr_posts(api_key, blogname, offset)
def feed2fields(file):
| diff --git a/pelican/tests/test_importer.py b/pelican/tests/test_importer.py
--- a/pelican/tests/test_importer.py
+++ b/pelican/tests/test_importer.py
@@ -1,7 +1,11 @@
+import datetime
import locale
import os
import re
from posixpath import join as posix_join
+from unittest.mock import patch
+
+import dateutil.tz
from pelican.settings import DEFAULT_CONFIG
from pelican.tests.support import (mute, skipIfNoExecutable, temporary_folder,
@@ -10,9 +14,12 @@
build_markdown_header,
decode_wp_content,
download_attachments, fields2pelican,
- get_attachments, wp2fields)
+ get_attachments, tumblr2fields,
+ wp2fields,
+ )
from pelican.utils import path_to_file_url, slugify
+
CUR_DIR = os.path.abspath(os.path.dirname(__file__))
BLOGGER_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'bloggerexport.xml')
WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'wordpressexport.xml')
@@ -34,17 +41,26 @@
LXML = False
-@skipIfNoExecutable(['pandoc', '--version'])
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
-class TestBloggerXmlImporter(unittest.TestCase):
-
+class TestWithOsDefaults(unittest.TestCase):
+ """Set locale to C and timezone to UTC for tests, then restore."""
def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, 'C')
- self.posts = blogger2fields(BLOGGER_XML_SAMPLE)
+ self.old_timezone = datetime.datetime.now(dateutil.tz.tzlocal()).tzname()
+ os.environ['TZ'] = 'UTC'
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
+ os.environ['TZ'] = self.old_timezone
+
+
+@skipIfNoExecutable(['pandoc', '--version'])
+@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
+class TestBloggerXmlImporter(TestWithOsDefaults):
+
+ def setUp(self):
+ super().setUp()
+ self.posts = blogger2fields(BLOGGER_XML_SAMPLE)
def test_recognise_kind_and_title(self):
"""Check that importer only outputs pages, articles and comments,
@@ -85,17 +101,13 @@ def test_recognise_status_with_correct_filename(self):
@skipIfNoExecutable(['pandoc', '--version'])
@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
-class TestWordpressXmlImporter(unittest.TestCase):
+class TestWordpressXmlImporter(TestWithOsDefaults):
def setUp(self):
- self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ super().setUp()
self.posts = wp2fields(WORDPRESS_XML_SAMPLE)
self.custposts = wp2fields(WORDPRESS_XML_SAMPLE, True)
- def tearDown(self):
- locale.setlocale(locale.LC_ALL, self.old_locale)
-
def test_ignore_empty_posts(self):
self.assertTrue(self.posts)
for (title, content, fname, date, author,
@@ -477,3 +489,46 @@ def test_download_attachments(self):
self.assertTrue(
directory.endswith(posix_join('content', 'article.rst')),
directory)
+
+
+class TestTumblrImporter(TestWithOsDefaults):
+ @patch("pelican.tools.pelican_import._get_tumblr_posts")
+ def test_posts(self, get):
+ def get_posts(api_key, blogname, offset=0):
+ if offset > 0:
+ return []
+
+ return [
+ {
+ "type": "photo",
+ "blog_name": "testy",
+ "date": "2019-11-07 21:26:40 GMT",
+ "timestamp": 1573162000,
+ "format": "html",
+ "slug": "a-slug",
+ "tags": [
+ "economics"
+ ],
+ "state": "published",
+
+ "photos": [
+ {
+ "caption": "",
+ "original_size": {
+ "url": "https://..fccdc2360ba7182a.jpg",
+ "width": 634,
+ "height": 789
+ },
+ }]
+ }
+ ]
+ get.side_effect = get_posts
+
+ posts = list(tumblr2fields("api_key", "blogname"))
+ self.assertEqual(
+ [('Photo',
+ '<img alt="" src="https://..fccdc2360ba7182a.jpg" />\n',
+ '2019-11-07-a-slug', '2019-11-07 21:26:40', 'testy', ['photo'],
+ ['economics'], 'published', 'article', 'html')],
+ posts,
+ posts)
| Import yields incorrect link for Tumblr image posts
<!--
Hi there! Thank you for discovering and submitting an issue.
Before you submit this, let’s make sure of a few things.
Please make sure the following boxes are ticked if they are correct.
If not, please try and fulfill them first.
-->
<!-- Checked checkbox should look like this: [x] -->
- [x] I have read the [Filing Issues](https://docs.getpelican.com/en/latest/contribute.html#filing-issues) and subsequent “How to Get Help” sections of the documentation.
- [x] I have searched the [issues](https://github.com/getpelican/pelican/issues?q=is%3Aissue) (including closed ones) and believe that this is not a duplicate.
<!--
Once the above boxes are checked, if you are able to fill in the following list
with your information, it would be very helpful for maintainers.
-->
- **OS version and name**: OS X 12.6.7
- **Python version**: 3.11.4
- **Pelican version**: 4.8.0
- **Link to theme**: default
- **Links to plugins**: default
- **Link to your site**: n/a
- **Link to your source**: n/a
- **Link to a [Gist](https://gist.github.com/) with the contents of your settings file**: n/a
## Issue
<!--
Now feel free to write your issue. Please avoid vague phrases like “[…] doesn’t work”.
Be descriptive! Thanks again 🙌 ❤️
-->
Importing a Tumblr blog entry that contains an image produces an incorrect link.
For example, https://www.tumblr.com/boxydog/188887119699/a-nice-visualization-of-whats-been-getting produces a link spread out one letter per line (see below). I bet the Python code is iterating over a string when it thinks it's iterating over a list.
First 50 lines of the .rst file:
```
$ head -50 boxydog.com/2019-11-07-a-nice-visualization-of-whats-been-getting.rst
blog.bluecrossmn.com
####################
:date: 2019-11-07 15:26:40
:author: boxydog
:category: photo
:tags: economics
:slug: 2019-11-07-a-nice-visualization-of-whats-been-getting
:status: published
<
i
m
g
a
l
t
=
"
"
s
r
c
=
"
h
t
t
p
s
:
```
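
The patch in this row replaces the per-photo `'\n'.join(...)` call with a single join over a generator expression. As a minimal, self-contained sketch of why the buggy shape prints one character per line (variable names mirror the diff; the photo dict and URL below are placeholders, not values from the dataset):

```python
# str.join() iterates its argument: handing it a single string splits that
# string into characters, which is the one-letter-per-line output shown above.
fmtstr = '<img alt="%s" src="%s" />'
photos = [{"caption": "", "original_size": {"url": "https://example.invalid/a.jpg"}}]

# Buggy shape (mirrors the removed lines in the patch): join() receives a plain string.
broken = ""
for photo in photos:
    broken += "\n".join(
        fmtstr % (photo["caption"], photo["original_size"]["url"]))
print(repr(broken[:7]))  # '<\ni\nm\ng'

# Fixed shape (mirrors the added lines): join() receives one formatted string per photo.
fixed = "\n".join(
    fmtstr % (photo["caption"], photo["original_size"]["url"])
    for photo in photos)
print(fixed)  # <img alt="" src="https://example.invalid/a.jpg" />
```

With several photos, the generator form emits one image tag per line instead of the character-by-character output above.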
| 2023-10-25T23:38:55Z | [] | [] |
getpelican/pelican | 3,231 | getpelican__pelican-3231 | ["3227", "3123"] | bfb258769710331098665efe8b29d211bcc4af55 | diff --git a/pelican/__init__.py b/pelican/__init__.py
--- a/pelican/__init__.py
+++ b/pelican/__init__.py
@@ -9,19 +9,25 @@
import time
import traceback
from collections.abc import Iterable
+
# Combines all paths to `pelican` package accessible from `sys.path`
# Makes it possible to install `pelican` and namespace plugins into different
# locations in the file system (e.g. pip with `-e` or `--user`)
from pkgutil import extend_path
+
__path__ = extend_path(__path__, __name__)
# pelican.log has to be the first pelican module to be loaded
# because logging.setLoggerClass has to be called before logging.getLogger
from pelican.log import console
from pelican.log import init as init_logging
-from pelican.generators import (ArticlesGenerator, # noqa: I100
- PagesGenerator, SourceFileGenerator,
- StaticGenerator, TemplatePagesGenerator)
+from pelican.generators import (
+ ArticlesGenerator, # noqa: I100
+ PagesGenerator,
+ SourceFileGenerator,
+ StaticGenerator,
+ TemplatePagesGenerator,
+)
from pelican.plugins import signals
from pelican.plugins._utils import get_plugin_name, load_plugins
from pelican.readers import Readers
@@ -35,12 +41,11 @@
except Exception:
__version__ = "unknown"
-DEFAULT_CONFIG_NAME = 'pelicanconf.py'
+DEFAULT_CONFIG_NAME = "pelicanconf.py"
logger = logging.getLogger(__name__)
class Pelican:
-
def __init__(self, settings):
"""Pelican initialization
@@ -50,35 +55,34 @@ def __init__(self, settings):
# define the default settings
self.settings = settings
- self.path = settings['PATH']
- self.theme = settings['THEME']
- self.output_path = settings['OUTPUT_PATH']
- self.ignore_files = settings['IGNORE_FILES']
- self.delete_outputdir = settings['DELETE_OUTPUT_DIRECTORY']
- self.output_retention = settings['OUTPUT_RETENTION']
+ self.path = settings["PATH"]
+ self.theme = settings["THEME"]
+ self.output_path = settings["OUTPUT_PATH"]
+ self.ignore_files = settings["IGNORE_FILES"]
+ self.delete_outputdir = settings["DELETE_OUTPUT_DIRECTORY"]
+ self.output_retention = settings["OUTPUT_RETENTION"]
self.init_path()
self.init_plugins()
signals.initialized.send(self)
def init_path(self):
- if not any(p in sys.path for p in ['', os.curdir]):
+ if not any(p in sys.path for p in ["", os.curdir]):
logger.debug("Adding current directory to system path")
- sys.path.insert(0, '')
+ sys.path.insert(0, "")
def init_plugins(self):
self.plugins = []
for plugin in load_plugins(self.settings):
name = get_plugin_name(plugin)
- logger.debug('Registering plugin `%s`', name)
+ logger.debug("Registering plugin `%s`", name)
try:
plugin.register()
self.plugins.append(plugin)
except Exception as e:
- logger.error('Cannot register plugin `%s`\n%s',
- name, e)
+ logger.error("Cannot register plugin `%s`\n%s", name, e)
- self.settings['PLUGINS'] = [get_plugin_name(p) for p in self.plugins]
+ self.settings["PLUGINS"] = [get_plugin_name(p) for p in self.plugins]
def run(self):
"""Run the generators and return"""
@@ -87,10 +91,10 @@ def run(self):
context = self.settings.copy()
# Share these among all the generators and content objects
# They map source paths to Content objects or None
- context['generated_content'] = {}
- context['static_links'] = set()
- context['static_content'] = {}
- context['localsiteurl'] = self.settings['SITEURL']
+ context["generated_content"] = {}
+ context["static_links"] = set()
+ context["static_content"] = {}
+ context["localsiteurl"] = self.settings["SITEURL"]
generators = [
cls(
@@ -99,23 +103,25 @@ def run(self):
path=self.path,
theme=self.theme,
output_path=self.output_path,
- ) for cls in self._get_generator_classes()
+ )
+ for cls in self._get_generator_classes()
]
# Delete the output directory if (1) the appropriate setting is True
# and (2) that directory is not the parent of the source directory
- if (self.delete_outputdir
- and os.path.commonpath([os.path.realpath(self.output_path)]) !=
- os.path.commonpath([os.path.realpath(self.output_path),
- os.path.realpath(self.path)])):
+ if self.delete_outputdir and os.path.commonpath(
+ [os.path.realpath(self.output_path)]
+ ) != os.path.commonpath(
+ [os.path.realpath(self.output_path), os.path.realpath(self.path)]
+ ):
clean_output_dir(self.output_path, self.output_retention)
for p in generators:
- if hasattr(p, 'generate_context'):
+ if hasattr(p, "generate_context"):
p.generate_context()
for p in generators:
- if hasattr(p, 'refresh_metadata_intersite_links'):
+ if hasattr(p, "refresh_metadata_intersite_links"):
p.refresh_metadata_intersite_links()
signals.all_generators_finalized.send(generators)
@@ -123,61 +129,75 @@ def run(self):
writer = self._get_writer()
for p in generators:
- if hasattr(p, 'generate_output'):
+ if hasattr(p, "generate_output"):
p.generate_output(writer)
signals.finalized.send(self)
- articles_generator = next(g for g in generators
- if isinstance(g, ArticlesGenerator))
- pages_generator = next(g for g in generators
- if isinstance(g, PagesGenerator))
+ articles_generator = next(
+ g for g in generators if isinstance(g, ArticlesGenerator)
+ )
+ pages_generator = next(g for g in generators if isinstance(g, PagesGenerator))
pluralized_articles = maybe_pluralize(
- (len(articles_generator.articles) +
- len(articles_generator.translations)),
- 'article',
- 'articles')
+ (len(articles_generator.articles) + len(articles_generator.translations)),
+ "article",
+ "articles",
+ )
pluralized_drafts = maybe_pluralize(
- (len(articles_generator.drafts) +
- len(articles_generator.drafts_translations)),
- 'draft',
- 'drafts')
+ (
+ len(articles_generator.drafts)
+ + len(articles_generator.drafts_translations)
+ ),
+ "draft",
+ "drafts",
+ )
pluralized_hidden_articles = maybe_pluralize(
- (len(articles_generator.hidden_articles) +
- len(articles_generator.hidden_translations)),
- 'hidden article',
- 'hidden articles')
+ (
+ len(articles_generator.hidden_articles)
+ + len(articles_generator.hidden_translations)
+ ),
+ "hidden article",
+ "hidden articles",
+ )
pluralized_pages = maybe_pluralize(
- (len(pages_generator.pages) +
- len(pages_generator.translations)),
- 'page',
- 'pages')
+ (len(pages_generator.pages) + len(pages_generator.translations)),
+ "page",
+ "pages",
+ )
pluralized_hidden_pages = maybe_pluralize(
- (len(pages_generator.hidden_pages) +
- len(pages_generator.hidden_translations)),
- 'hidden page',
- 'hidden pages')
+ (
+ len(pages_generator.hidden_pages)
+ + len(pages_generator.hidden_translations)
+ ),
+ "hidden page",
+ "hidden pages",
+ )
pluralized_draft_pages = maybe_pluralize(
- (len(pages_generator.draft_pages) +
- len(pages_generator.draft_translations)),
- 'draft page',
- 'draft pages')
-
- console.print('Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.'
- .format(
- pluralized_articles,
- pluralized_drafts,
- pluralized_hidden_articles,
- pluralized_pages,
- pluralized_hidden_pages,
- pluralized_draft_pages,
- time.time() - start_time))
+ (
+ len(pages_generator.draft_pages)
+ + len(pages_generator.draft_translations)
+ ),
+ "draft page",
+ "draft pages",
+ )
+
+ console.print(
+ "Done: Processed {}, {}, {}, {}, {} and {} in {:.2f} seconds.".format(
+ pluralized_articles,
+ pluralized_drafts,
+ pluralized_hidden_articles,
+ pluralized_pages,
+ pluralized_hidden_pages,
+ pluralized_draft_pages,
+ time.time() - start_time,
+ )
+ )
def _get_generator_classes(self):
discovered_generators = [
(ArticlesGenerator, "internal"),
- (PagesGenerator, "internal")
+ (PagesGenerator, "internal"),
]
if self.settings["TEMPLATE_PAGES"]:
@@ -236,7 +256,7 @@ def __call__(self, parser, namespace, values, option_string):
except Exception as e:
logger.critical("%s: %s", e.__class__.__name__, e)
console.print_exception()
- sys.exit(getattr(e, 'exitcode', 1))
+ sys.exit(getattr(e, "exitcode", 1))
if values:
# One or more arguments provided, so only print those settings
@@ -244,14 +264,16 @@ def __call__(self, parser, namespace, values, option_string):
if setting in settings:
# Only add newline between setting name and value if dict
if isinstance(settings[setting], (dict, tuple, list)):
- setting_format = '\n{}:\n{}'
+ setting_format = "\n{}:\n{}"
else:
- setting_format = '\n{}: {}'
- console.print(setting_format.format(
- setting,
- pprint.pformat(settings[setting])))
+ setting_format = "\n{}: {}"
+ console.print(
+ setting_format.format(
+ setting, pprint.pformat(settings[setting])
+ )
+ )
else:
- console.print('\n{} is not a recognized setting.'.format(setting))
+ console.print("\n{} is not a recognized setting.".format(setting))
break
else:
# No argument was given to --print-settings, so print all settings
@@ -268,170 +290,258 @@ def __call__(self, parser, namespace, values, option_string=None):
k, v = item.split("=", 1)
except ValueError:
raise ValueError(
- 'Extra settings must be specified as KEY=VALUE pairs '
- f'but you specified {item}'
+ "Extra settings must be specified as KEY=VALUE pairs "
+ f"but you specified {item}"
)
try:
overrides[k] = json.loads(v)
except json.decoder.JSONDecodeError:
raise ValueError(
- f'Invalid JSON value: {v}. '
- 'Values specified via -e / --extra-settings flags '
- 'must be in JSON notation. '
- 'Use -e KEY=\'"string"\' to specify a string value; '
- '-e KEY=null to specify None; '
- '-e KEY=false (or true) to specify False (or True).'
+ f"Invalid JSON value: {v}. "
+ "Values specified via -e / --extra-settings flags "
+ "must be in JSON notation. "
+ "Use -e KEY='\"string\"' to specify a string value; "
+ "-e KEY=null to specify None; "
+ "-e KEY=false (or true) to specify False (or True)."
)
setattr(namespace, self.dest, overrides)
def parse_arguments(argv=None):
parser = argparse.ArgumentParser(
- description='A tool to generate a static blog, '
- ' with restructured text input files.',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description="A tool to generate a static blog, "
+ " with restructured text input files.",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ )
+
+ parser.add_argument(
+ dest="path",
+ nargs="?",
+ help="Path where to find the content files.",
+ default=None,
+ )
+
+ parser.add_argument(
+ "-t",
+ "--theme-path",
+ dest="theme",
+ help="Path where to find the theme templates. If not "
+ "specified, it will use the default one included with "
+ "pelican.",
+ )
+
+ parser.add_argument(
+ "-o",
+ "--output",
+ dest="output",
+ help="Where to output the generated files. If not "
+ "specified, a directory will be created, named "
+ '"output" in the current path.',
+ )
+
+ parser.add_argument(
+ "-s",
+ "--settings",
+ dest="settings",
+ help="The settings of the application, this is "
+ "automatically set to {} if a file exists with this "
+ "name.".format(DEFAULT_CONFIG_NAME),
+ )
+
+ parser.add_argument(
+ "-d",
+ "--delete-output-directory",
+ dest="delete_outputdir",
+ action="store_true",
+ default=None,
+ help="Delete the output directory.",
+ )
+
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="store_const",
+ const=logging.INFO,
+ dest="verbosity",
+ help="Show all messages.",
+ )
+
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ action="store_const",
+ const=logging.CRITICAL,
+ dest="verbosity",
+ help="Show only critical errors.",
+ )
+
+ parser.add_argument(
+ "-D",
+ "--debug",
+ action="store_const",
+ const=logging.DEBUG,
+ dest="verbosity",
+ help="Show all messages, including debug messages.",
+ )
+
+ parser.add_argument(
+ "--version",
+ action="version",
+ version=__version__,
+ help="Print the pelican version and exit.",
)
- parser.add_argument(dest='path', nargs='?',
- help='Path where to find the content files.',
- default=None)
-
- parser.add_argument('-t', '--theme-path', dest='theme',
- help='Path where to find the theme templates. If not '
- 'specified, it will use the default one included with '
- 'pelican.')
-
- parser.add_argument('-o', '--output', dest='output',
- help='Where to output the generated files. If not '
- 'specified, a directory will be created, named '
- '"output" in the current path.')
-
- parser.add_argument('-s', '--settings', dest='settings',
- help='The settings of the application, this is '
- 'automatically set to {} if a file exists with this '
- 'name.'.format(DEFAULT_CONFIG_NAME))
-
- parser.add_argument('-d', '--delete-output-directory',
- dest='delete_outputdir', action='store_true',
- default=None, help='Delete the output directory.')
-
- parser.add_argument('-v', '--verbose', action='store_const',
- const=logging.INFO, dest='verbosity',
- help='Show all messages.')
-
- parser.add_argument('-q', '--quiet', action='store_const',
- const=logging.CRITICAL, dest='verbosity',
- help='Show only critical errors.')
-
- parser.add_argument('-D', '--debug', action='store_const',
- const=logging.DEBUG, dest='verbosity',
- help='Show all messages, including debug messages.')
-
- parser.add_argument('--version', action='version', version=__version__,
- help='Print the pelican version and exit.')
-
- parser.add_argument('-r', '--autoreload', dest='autoreload',
- action='store_true',
- help='Relaunch pelican each time a modification occurs'
- ' on the content files.')
-
- parser.add_argument('--print-settings', dest='print_settings', nargs='*',
- action=PrintSettings, metavar='SETTING_NAME',
- help='Print current configuration settings and exit. '
- 'Append one or more setting name arguments to see the '
- 'values for specific settings only.')
-
- parser.add_argument('--relative-urls', dest='relative_paths',
- action='store_true',
- help='Use relative urls in output, '
- 'useful for site development')
-
- parser.add_argument('--cache-path', dest='cache_path',
- help=('Directory in which to store cache files. '
- 'If not specified, defaults to "cache".'))
-
- parser.add_argument('--ignore-cache', action='store_true',
- dest='ignore_cache', help='Ignore content cache '
- 'from previous runs by not loading cache files.')
-
- parser.add_argument('-w', '--write-selected', type=str,
- dest='selected_paths', default=None,
- help='Comma separated list of selected paths to write')
-
- parser.add_argument('--fatal', metavar='errors|warnings',
- choices=('errors', 'warnings'), default='',
- help=('Exit the program with non-zero status if any '
- 'errors/warnings encountered.'))
-
- parser.add_argument('--logs-dedup-min-level', default='WARNING',
- choices=('DEBUG', 'INFO', 'WARNING', 'ERROR'),
- help=('Only enable log de-duplication for levels equal'
- ' to or above the specified value'))
-
- parser.add_argument('-l', '--listen', dest='listen', action='store_true',
- help='Serve content files via HTTP and port 8000.')
-
- parser.add_argument('-p', '--port', dest='port', type=int,
- help='Port to serve HTTP files at. (default: 8000)')
-
- parser.add_argument('-b', '--bind', dest='bind',
- help='IP to bind to when serving files via HTTP '
- '(default: 127.0.0.1)')
-
- parser.add_argument('-e', '--extra-settings', dest='overrides',
- help='Specify one or more SETTING=VALUE pairs to '
- 'override settings. VALUE must be in JSON notation: '
- 'specify string values as SETTING=\'"some string"\'; '
- 'booleans as SETTING=true or SETTING=false; '
- 'None as SETTING=null.',
- nargs='*',
- action=ParseOverrides,
- default={})
+ parser.add_argument(
+ "-r",
+ "--autoreload",
+ dest="autoreload",
+ action="store_true",
+ help="Relaunch pelican each time a modification occurs"
+ " on the content files.",
+ )
+
+ parser.add_argument(
+ "--print-settings",
+ dest="print_settings",
+ nargs="*",
+ action=PrintSettings,
+ metavar="SETTING_NAME",
+ help="Print current configuration settings and exit. "
+ "Append one or more setting name arguments to see the "
+ "values for specific settings only.",
+ )
+
+ parser.add_argument(
+ "--relative-urls",
+ dest="relative_paths",
+ action="store_true",
+ help="Use relative urls in output, " "useful for site development",
+ )
+
+ parser.add_argument(
+ "--cache-path",
+ dest="cache_path",
+ help=(
+ "Directory in which to store cache files. "
+ 'If not specified, defaults to "cache".'
+ ),
+ )
+
+ parser.add_argument(
+ "--ignore-cache",
+ action="store_true",
+ dest="ignore_cache",
+ help="Ignore content cache " "from previous runs by not loading cache files.",
+ )
+
+ parser.add_argument(
+ "-w",
+ "--write-selected",
+ type=str,
+ dest="selected_paths",
+ default=None,
+ help="Comma separated list of selected paths to write",
+ )
+
+ parser.add_argument(
+ "--fatal",
+ metavar="errors|warnings",
+ choices=("errors", "warnings"),
+ default="",
+ help=(
+ "Exit the program with non-zero status if any "
+ "errors/warnings encountered."
+ ),
+ )
+
+ parser.add_argument(
+ "--logs-dedup-min-level",
+ default="WARNING",
+ choices=("DEBUG", "INFO", "WARNING", "ERROR"),
+ help=(
+ "Only enable log de-duplication for levels equal"
+ " to or above the specified value"
+ ),
+ )
+
+ parser.add_argument(
+ "-l",
+ "--listen",
+ dest="listen",
+ action="store_true",
+ help="Serve content files via HTTP and port 8000.",
+ )
+
+ parser.add_argument(
+ "-p",
+ "--port",
+ dest="port",
+ type=int,
+ help="Port to serve HTTP files at. (default: 8000)",
+ )
+
+ parser.add_argument(
+ "-b",
+ "--bind",
+ dest="bind",
+ help="IP to bind to when serving files via HTTP " "(default: 127.0.0.1)",
+ )
+
+ parser.add_argument(
+ "-e",
+ "--extra-settings",
+ dest="overrides",
+ help="Specify one or more SETTING=VALUE pairs to "
+ "override settings. VALUE must be in JSON notation: "
+ "specify string values as SETTING='\"some string\"'; "
+ "booleans as SETTING=true or SETTING=false; "
+ "None as SETTING=null.",
+ nargs="*",
+ action=ParseOverrides,
+ default={},
+ )
args = parser.parse_args(argv)
if args.port is not None and not args.listen:
- logger.warning('--port without --listen has no effect')
+ logger.warning("--port without --listen has no effect")
if args.bind is not None and not args.listen:
- logger.warning('--bind without --listen has no effect')
+ logger.warning("--bind without --listen has no effect")
return args
def get_config(args):
- """Builds a config dictionary based on supplied `args`.
- """
+ """Builds a config dictionary based on supplied `args`."""
config = {}
if args.path:
- config['PATH'] = os.path.abspath(os.path.expanduser(args.path))
+ config["PATH"] = os.path.abspath(os.path.expanduser(args.path))
if args.output:
- config['OUTPUT_PATH'] = \
- os.path.abspath(os.path.expanduser(args.output))
+ config["OUTPUT_PATH"] = os.path.abspath(os.path.expanduser(args.output))
if args.theme:
abstheme = os.path.abspath(os.path.expanduser(args.theme))
- config['THEME'] = abstheme if os.path.exists(abstheme) else args.theme
+ config["THEME"] = abstheme if os.path.exists(abstheme) else args.theme
if args.delete_outputdir is not None:
- config['DELETE_OUTPUT_DIRECTORY'] = args.delete_outputdir
+ config["DELETE_OUTPUT_DIRECTORY"] = args.delete_outputdir
if args.ignore_cache:
- config['LOAD_CONTENT_CACHE'] = False
+ config["LOAD_CONTENT_CACHE"] = False
if args.cache_path:
- config['CACHE_PATH'] = args.cache_path
+ config["CACHE_PATH"] = args.cache_path
if args.selected_paths:
- config['WRITE_SELECTED'] = args.selected_paths.split(',')
+ config["WRITE_SELECTED"] = args.selected_paths.split(",")
if args.relative_paths:
- config['RELATIVE_URLS'] = args.relative_paths
+ config["RELATIVE_URLS"] = args.relative_paths
if args.port is not None:
- config['PORT'] = args.port
+ config["PORT"] = args.port
if args.bind is not None:
- config['BIND'] = args.bind
- config['DEBUG'] = args.verbosity == logging.DEBUG
+ config["BIND"] = args.bind
+ config["DEBUG"] = args.verbosity == logging.DEBUG
config.update(args.overrides)
return config
def get_instance(args):
-
config_file = args.settings
if config_file is None and os.path.isfile(DEFAULT_CONFIG_NAME):
config_file = DEFAULT_CONFIG_NAME
@@ -439,9 +549,9 @@ def get_instance(args):
settings = read_settings(config_file, override=get_config(args))
- cls = settings['PELICAN_CLASS']
+ cls = settings["PELICAN_CLASS"]
if isinstance(cls, str):
- module, cls_name = cls.rsplit('.', 1)
+ module, cls_name = cls.rsplit(".", 1)
module = __import__(module)
cls = getattr(module, cls_name)
@@ -449,8 +559,10 @@ def get_instance(args):
def autoreload(args, excqueue=None):
- console.print(' --- AutoReload Mode: Monitoring `content`, `theme` and'
- ' `settings` for changes. ---')
+ console.print(
+ " --- AutoReload Mode: Monitoring `content`, `theme` and"
+ " `settings` for changes. ---"
+ )
pelican, settings = get_instance(args)
settings_file = os.path.abspath(args.settings)
while True:
@@ -463,8 +575,9 @@ def autoreload(args, excqueue=None):
if settings_file in changed_files:
pelican, settings = get_instance(args)
- console.print('\n-> Modified: {}. re-generating...'.format(
- ', '.join(changed_files)))
+ console.print(
+ "\n-> Modified: {}. re-generating...".format(", ".join(changed_files))
+ )
except KeyboardInterrupt:
if excqueue is not None:
@@ -473,15 +586,14 @@ def autoreload(args, excqueue=None):
raise
except Exception as e:
- if (args.verbosity == logging.DEBUG):
+ if args.verbosity == logging.DEBUG:
if excqueue is not None:
- excqueue.put(
- traceback.format_exception_only(type(e), e)[-1])
+ excqueue.put(traceback.format_exception_only(type(e), e)[-1])
else:
raise
logger.warning(
- 'Caught exception:\n"%s".', e,
- exc_info=settings.get('DEBUG', False))
+ 'Caught exception:\n"%s".', e, exc_info=settings.get("DEBUG", False)
+ )
def listen(server, port, output, excqueue=None):
@@ -491,8 +603,7 @@ def listen(server, port, output, excqueue=None):
RootedHTTPServer.allow_reuse_address = True
try:
- httpd = RootedHTTPServer(
- output, (server, port), ComplexHTTPRequestHandler)
+ httpd = RootedHTTPServer(output, (server, port), ComplexHTTPRequestHandler)
except OSError as e:
logging.error("Could not listen on port %s, server %s.", port, server)
if excqueue is not None:
@@ -500,8 +611,9 @@ def listen(server, port, output, excqueue=None):
return
try:
- console.print("Serving site at: http://{}:{} - Tap CTRL-C to stop".format(
- server, port))
+ console.print(
+ "Serving site at: http://{}:{} - Tap CTRL-C to stop".format(server, port)
+ )
httpd.serve_forever()
except Exception as e:
if excqueue is not None:
@@ -518,24 +630,31 @@ def listen(server, port, output, excqueue=None):
def main(argv=None):
args = parse_arguments(argv)
logs_dedup_min_level = getattr(logging, args.logs_dedup_min_level)
- init_logging(level=args.verbosity, fatal=args.fatal,
- name=__name__, logs_dedup_min_level=logs_dedup_min_level)
+ init_logging(
+ level=args.verbosity,
+ fatal=args.fatal,
+ name=__name__,
+ logs_dedup_min_level=logs_dedup_min_level,
+ )
- logger.debug('Pelican version: %s', __version__)
- logger.debug('Python version: %s', sys.version.split()[0])
+ logger.debug("Pelican version: %s", __version__)
+ logger.debug("Python version: %s", sys.version.split()[0])
try:
pelican, settings = get_instance(args)
if args.autoreload and args.listen:
excqueue = multiprocessing.Queue()
- p1 = multiprocessing.Process(
- target=autoreload,
- args=(args, excqueue))
+ p1 = multiprocessing.Process(target=autoreload, args=(args, excqueue))
p2 = multiprocessing.Process(
target=listen,
- args=(settings.get('BIND'), settings.get('PORT'),
- settings.get("OUTPUT_PATH"), excqueue))
+ args=(
+ settings.get("BIND"),
+ settings.get("PORT"),
+ settings.get("OUTPUT_PATH"),
+ excqueue,
+ ),
+ )
try:
p1.start()
p2.start()
@@ -548,16 +667,17 @@ def main(argv=None):
elif args.autoreload:
autoreload(args)
elif args.listen:
- listen(settings.get('BIND'), settings.get('PORT'),
- settings.get("OUTPUT_PATH"))
+ listen(
+ settings.get("BIND"), settings.get("PORT"), settings.get("OUTPUT_PATH")
+ )
else:
with console.status("Generating..."):
pelican.run()
except KeyboardInterrupt:
- logger.warning('Keyboard interrupt received. Exiting.')
+ logger.warning("Keyboard interrupt received. Exiting.")
except Exception as e:
logger.critical("%s: %s", e.__class__.__name__, e)
if args.verbosity == logging.DEBUG:
console.print_exception()
- sys.exit(getattr(e, 'exitcode', 1))
+ sys.exit(getattr(e, "exitcode", 1))
diff --git a/pelican/__main__.py b/pelican/__main__.py
--- a/pelican/__main__.py
+++ b/pelican/__main__.py
@@ -5,5 +5,5 @@
from . import main
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/pelican/cache.py b/pelican/cache.py
--- a/pelican/cache.py
+++ b/pelican/cache.py
@@ -19,29 +19,35 @@ def __init__(self, settings, cache_name, caching_policy, load_policy):
Sets caching policy according to *caching_policy*.
"""
self.settings = settings
- self._cache_path = os.path.join(self.settings['CACHE_PATH'],
- cache_name)
+ self._cache_path = os.path.join(self.settings["CACHE_PATH"], cache_name)
self._cache_data_policy = caching_policy
- if self.settings['GZIP_CACHE']:
+ if self.settings["GZIP_CACHE"]:
import gzip
+
self._cache_open = gzip.open
else:
self._cache_open = open
if load_policy:
try:
- with self._cache_open(self._cache_path, 'rb') as fhandle:
+ with self._cache_open(self._cache_path, "rb") as fhandle:
self._cache = pickle.load(fhandle)
except (OSError, UnicodeDecodeError) as err:
- logger.debug('Cannot load cache %s (this is normal on first '
- 'run). Proceeding with empty cache.\n%s',
- self._cache_path, err)
+ logger.debug(
+ "Cannot load cache %s (this is normal on first "
+ "run). Proceeding with empty cache.\n%s",
+ self._cache_path,
+ err,
+ )
self._cache = {}
except pickle.PickleError as err:
- logger.warning('Cannot unpickle cache %s, cache may be using '
- 'an incompatible protocol (see pelican '
- 'caching docs). '
- 'Proceeding with empty cache.\n%s',
- self._cache_path, err)
+ logger.warning(
+ "Cannot unpickle cache %s, cache may be using "
+ "an incompatible protocol (see pelican "
+ "caching docs). "
+ "Proceeding with empty cache.\n%s",
+ self._cache_path,
+ err,
+ )
self._cache = {}
else:
self._cache = {}
@@ -62,12 +68,13 @@ def save_cache(self):
"""Save the updated cache"""
if self._cache_data_policy:
try:
- mkdir_p(self.settings['CACHE_PATH'])
- with self._cache_open(self._cache_path, 'wb') as fhandle:
+ mkdir_p(self.settings["CACHE_PATH"])
+ with self._cache_open(self._cache_path, "wb") as fhandle:
pickle.dump(self._cache, fhandle)
except (OSError, pickle.PicklingError, TypeError) as err:
- logger.warning('Could not save cache %s\n ... %s',
- self._cache_path, err)
+ logger.warning(
+ "Could not save cache %s\n ... %s", self._cache_path, err
+ )
class FileStampDataCacher(FileDataCacher):
@@ -80,8 +87,8 @@ def __init__(self, settings, cache_name, caching_policy, load_policy):
super().__init__(settings, cache_name, caching_policy, load_policy)
- method = self.settings['CHECK_MODIFIED_METHOD']
- if method == 'mtime':
+ method = self.settings["CHECK_MODIFIED_METHOD"]
+ if method == "mtime":
self._filestamp_func = os.path.getmtime
else:
try:
@@ -89,12 +96,12 @@ def __init__(self, settings, cache_name, caching_policy, load_policy):
def filestamp_func(filename):
"""return hash of file contents"""
- with open(filename, 'rb') as fhandle:
+ with open(filename, "rb") as fhandle:
return hash_func(fhandle.read()).digest()
self._filestamp_func = filestamp_func
except AttributeError as err:
- logger.warning('Could not get hashing function\n\t%s', err)
+ logger.warning("Could not get hashing function\n\t%s", err)
self._filestamp_func = None
def cache_data(self, filename, data):
@@ -115,9 +122,8 @@ def _get_file_stamp(self, filename):
try:
return self._filestamp_func(filename)
except (OSError, TypeError) as err:
- logger.warning('Cannot get modification stamp for %s\n\t%s',
- filename, err)
- return ''
+ logger.warning("Cannot get modification stamp for %s\n\t%s", filename, err)
+ return ""
def get_cached_data(self, filename, default=None):
"""Get the cached data for the given filename
diff --git a/pelican/contents.py b/pelican/contents.py
--- a/pelican/contents.py
+++ b/pelican/contents.py
@@ -16,12 +16,19 @@
from pelican.plugins import signals
from pelican.settings import DEFAULT_CONFIG
-from pelican.utils import (deprecated_attribute, memoized, path_to_url,
- posixize_path, sanitised_join, set_date_tzinfo,
- slugify, truncate_html_words)
+from pelican.utils import (
+ deprecated_attribute,
+ memoized,
+ path_to_url,
+ posixize_path,
+ sanitised_join,
+ set_date_tzinfo,
+ slugify,
+ truncate_html_words,
+)
# Import these so that they're available when you import from pelican.contents.
-from pelican.urlwrappers import (Author, Category, Tag, URLWrapper) # NOQA
+from pelican.urlwrappers import Author, Category, Tag, URLWrapper # NOQA
logger = logging.getLogger(__name__)
@@ -36,12 +43,14 @@ class Content:
:param context: The shared context between generators.
"""
- @deprecated_attribute(old='filename', new='source_path', since=(3, 2, 0))
+
+ @deprecated_attribute(old="filename", new="source_path", since=(3, 2, 0))
def filename():
return None
- def __init__(self, content, metadata=None, settings=None,
- source_path=None, context=None):
+ def __init__(
+ self, content, metadata=None, settings=None, source_path=None, context=None
+ ):
if metadata is None:
metadata = {}
if settings is None:
@@ -59,8 +68,8 @@ def __init__(self, content, metadata=None, settings=None,
# set metadata as attributes
for key, value in local_metadata.items():
- if key in ('save_as', 'url'):
- key = 'override_' + key
+ if key in ("save_as", "url"):
+ key = "override_" + key
setattr(self, key.lower(), value)
# also keep track of the metadata attributes available
@@ -71,53 +80,52 @@ def __init__(self, content, metadata=None, settings=None,
# First, read the authors from "authors", if not, fallback to "author"
# and if not use the settings defined one, if any.
- if not hasattr(self, 'author'):
- if hasattr(self, 'authors'):
+ if not hasattr(self, "author"):
+ if hasattr(self, "authors"):
self.author = self.authors[0]
- elif 'AUTHOR' in settings:
- self.author = Author(settings['AUTHOR'], settings)
+ elif "AUTHOR" in settings:
+ self.author = Author(settings["AUTHOR"], settings)
- if not hasattr(self, 'authors') and hasattr(self, 'author'):
+ if not hasattr(self, "authors") and hasattr(self, "author"):
self.authors = [self.author]
# XXX Split all the following code into pieces, there is too much here.
# manage languages
self.in_default_lang = True
- if 'DEFAULT_LANG' in settings:
- default_lang = settings['DEFAULT_LANG'].lower()
- if not hasattr(self, 'lang'):
+ if "DEFAULT_LANG" in settings:
+ default_lang = settings["DEFAULT_LANG"].lower()
+ if not hasattr(self, "lang"):
self.lang = default_lang
- self.in_default_lang = (self.lang == default_lang)
+ self.in_default_lang = self.lang == default_lang
# create the slug if not existing, generate slug according to
# setting of SLUG_ATTRIBUTE
- if not hasattr(self, 'slug'):
- if (settings['SLUGIFY_SOURCE'] == 'title' and
- hasattr(self, 'title')):
+ if not hasattr(self, "slug"):
+ if settings["SLUGIFY_SOURCE"] == "title" and hasattr(self, "title"):
value = self.title
- elif (settings['SLUGIFY_SOURCE'] == 'basename' and
- source_path is not None):
+ elif settings["SLUGIFY_SOURCE"] == "basename" and source_path is not None:
value = os.path.basename(os.path.splitext(source_path)[0])
else:
value = None
if value is not None:
self.slug = slugify(
value,
- regex_subs=settings.get('SLUG_REGEX_SUBSTITUTIONS', []),
- preserve_case=settings.get('SLUGIFY_PRESERVE_CASE', False),
- use_unicode=settings.get('SLUGIFY_USE_UNICODE', False))
+ regex_subs=settings.get("SLUG_REGEX_SUBSTITUTIONS", []),
+ preserve_case=settings.get("SLUGIFY_PRESERVE_CASE", False),
+ use_unicode=settings.get("SLUGIFY_USE_UNICODE", False),
+ )
self.source_path = source_path
self.relative_source_path = self.get_relative_source_path()
# manage the date format
- if not hasattr(self, 'date_format'):
- if hasattr(self, 'lang') and self.lang in settings['DATE_FORMATS']:
- self.date_format = settings['DATE_FORMATS'][self.lang]
+ if not hasattr(self, "date_format"):
+ if hasattr(self, "lang") and self.lang in settings["DATE_FORMATS"]:
+ self.date_format = settings["DATE_FORMATS"][self.lang]
else:
- self.date_format = settings['DEFAULT_DATE_FORMAT']
+ self.date_format = settings["DEFAULT_DATE_FORMAT"]
if isinstance(self.date_format, tuple):
locale_string = self.date_format[0]
@@ -129,22 +137,22 @@ def __init__(self, content, metadata=None, settings=None,
timezone = getattr(self, "timezone", default_timezone)
self.timezone = ZoneInfo(timezone)
- if hasattr(self, 'date'):
+ if hasattr(self, "date"):
self.date = set_date_tzinfo(self.date, timezone)
self.locale_date = self.date.strftime(self.date_format)
- if hasattr(self, 'modified'):
+ if hasattr(self, "modified"):
self.modified = set_date_tzinfo(self.modified, timezone)
self.locale_modified = self.modified.strftime(self.date_format)
# manage status
- if not hasattr(self, 'status'):
+ if not hasattr(self, "status"):
# Previous default of None broke comment plugins and perhaps others
- self.status = getattr(self, 'default_status', '')
+ self.status = getattr(self, "default_status", "")
# store the summary metadata if it is set
- if 'summary' in metadata:
- self._summary = metadata['summary']
+ if "summary" in metadata:
+ self._summary = metadata["summary"]
signals.content_object_init.send(self)
@@ -156,8 +164,8 @@ def _has_valid_mandatory_properties(self):
for prop in self.mandatory_properties:
if not hasattr(self, prop):
logger.error(
- "Skipping %s: could not find information about '%s'",
- self, prop)
+ "Skipping %s: could not find information about '%s'", self, prop
+ )
return False
return True
@@ -183,12 +191,13 @@ def _has_valid_save_as(self):
return True
def _has_valid_status(self):
- if hasattr(self, 'allowed_statuses'):
+ if hasattr(self, "allowed_statuses"):
if self.status not in self.allowed_statuses:
logger.error(
"Unknown status '%s' for file %s, skipping it. (Not in %s)",
self.status,
- self, self.allowed_statuses
+ self,
+ self.allowed_statuses,
)
return False
@@ -198,42 +207,48 @@ def _has_valid_status(self):
def is_valid(self):
"""Validate Content"""
# Use all() to not short circuit and get results of all validations
- return all([self._has_valid_mandatory_properties(),
- self._has_valid_save_as(),
- self._has_valid_status()])
+ return all(
+ [
+ self._has_valid_mandatory_properties(),
+ self._has_valid_save_as(),
+ self._has_valid_status(),
+ ]
+ )
@property
def url_format(self):
"""Returns the URL, formatted with the proper values"""
metadata = copy.copy(self.metadata)
- path = self.metadata.get('path', self.get_relative_source_path())
- metadata.update({
- 'path': path_to_url(path),
- 'slug': getattr(self, 'slug', ''),
- 'lang': getattr(self, 'lang', 'en'),
- 'date': getattr(self, 'date', datetime.datetime.now()),
- 'author': self.author.slug if hasattr(self, 'author') else '',
- 'category': self.category.slug if hasattr(self, 'category') else ''
- })
+ path = self.metadata.get("path", self.get_relative_source_path())
+ metadata.update(
+ {
+ "path": path_to_url(path),
+ "slug": getattr(self, "slug", ""),
+ "lang": getattr(self, "lang", "en"),
+ "date": getattr(self, "date", datetime.datetime.now()),
+ "author": self.author.slug if hasattr(self, "author") else "",
+ "category": self.category.slug if hasattr(self, "category") else "",
+ }
+ )
return metadata
def _expand_settings(self, key, klass=None):
if not klass:
klass = self.__class__.__name__
- fq_key = ('{}_{}'.format(klass, key)).upper()
+ fq_key = ("{}_{}".format(klass, key)).upper()
return str(self.settings[fq_key]).format(**self.url_format)
def get_url_setting(self, key):
- if hasattr(self, 'override_' + key):
- return getattr(self, 'override_' + key)
- key = key if self.in_default_lang else 'lang_%s' % key
+ if hasattr(self, "override_" + key):
+ return getattr(self, "override_" + key)
+ key = key if self.in_default_lang else "lang_%s" % key
return self._expand_settings(key)
def _link_replacer(self, siteurl, m):
- what = m.group('what')
- value = urlparse(m.group('value'))
+ what = m.group("what")
+ value = urlparse(m.group("value"))
path = value.path
- origin = m.group('path')
+ origin = m.group("path")
# urllib.parse.urljoin() produces `a.html` for urljoin("..", "a.html")
# so if RELATIVE_URLS are enabled, we fall back to os.path.join() to
@@ -241,7 +256,7 @@ def _link_replacer(self, siteurl, m):
# `baz/http://foo/bar.html` for join("baz", "http://foo/bar.html")
# instead of correct "http://foo/bar.html", so one has to pick a side
# as there is no silver bullet.
- if self.settings['RELATIVE_URLS']:
+ if self.settings["RELATIVE_URLS"]:
joiner = os.path.join
else:
joiner = urljoin
@@ -251,16 +266,17 @@ def _link_replacer(self, siteurl, m):
# os.path.join()), so in order to get a correct answer one needs to
# append a trailing slash to siteurl in that case. This also makes
# the new behavior fully compatible with Pelican 3.7.1.
- if not siteurl.endswith('/'):
- siteurl += '/'
+ if not siteurl.endswith("/"):
+ siteurl += "/"
# XXX Put this in a different location.
- if what in {'filename', 'static', 'attach'}:
+ if what in {"filename", "static", "attach"}:
+
def _get_linked_content(key, url):
nonlocal value
def _find_path(path):
- if path.startswith('/'):
+ if path.startswith("/"):
path = path[1:]
else:
# relative to the source path of this content
@@ -287,59 +303,64 @@ def _find_path(path):
return result
# check if a static file is linked with {filename}
- if what == 'filename' and key == 'generated_content':
- linked_content = _get_linked_content('static_content', value)
+ if what == "filename" and key == "generated_content":
+ linked_content = _get_linked_content("static_content", value)
if linked_content:
logger.warning(
- '{filename} used for linking to static'
- ' content %s in %s. Use {static} instead',
+ "{filename} used for linking to static"
+ " content %s in %s. Use {static} instead",
value.path,
- self.get_relative_source_path())
+ self.get_relative_source_path(),
+ )
return linked_content
return None
- if what == 'filename':
- key = 'generated_content'
+ if what == "filename":
+ key = "generated_content"
else:
- key = 'static_content'
+ key = "static_content"
linked_content = _get_linked_content(key, value)
if linked_content:
- if what == 'attach':
+ if what == "attach":
linked_content.attach_to(self)
origin = joiner(siteurl, linked_content.url)
- origin = origin.replace('\\', '/') # for Windows paths.
+ origin = origin.replace("\\", "/") # for Windows paths.
else:
logger.warning(
"Unable to find '%s', skipping url replacement.",
- value.geturl(), extra={
- 'limit_msg': ("Other resources were not found "
- "and their urls not replaced")})
- elif what == 'category':
+ value.geturl(),
+ extra={
+ "limit_msg": (
+ "Other resources were not found "
+ "and their urls not replaced"
+ )
+ },
+ )
+ elif what == "category":
origin = joiner(siteurl, Category(path, self.settings).url)
- elif what == 'tag':
+ elif what == "tag":
origin = joiner(siteurl, Tag(path, self.settings).url)
- elif what == 'index':
- origin = joiner(siteurl, self.settings['INDEX_SAVE_AS'])
- elif what == 'author':
+ elif what == "index":
+ origin = joiner(siteurl, self.settings["INDEX_SAVE_AS"])
+ elif what == "author":
origin = joiner(siteurl, Author(path, self.settings).url)
else:
logger.warning(
- "Replacement Indicator '%s' not recognized, "
- "skipping replacement",
- what)
+ "Replacement Indicator '%s' not recognized, " "skipping replacement",
+ what,
+ )
# keep all other parts, such as query, fragment, etc.
parts = list(value)
parts[2] = origin
origin = urlunparse(parts)
- return ''.join((m.group('markup'), m.group('quote'), origin,
- m.group('quote')))
+ return "".join((m.group("markup"), m.group("quote"), origin, m.group("quote")))
def _get_intrasite_link_regex(self):
- intrasite_link_regex = self.settings['INTRASITE_LINK_REGEX']
+ intrasite_link_regex = self.settings["INTRASITE_LINK_REGEX"]
regex = r"""
(?P<markup><[^\>]+ # match tag with all url-value attributes
(?:href|src|poster|data|cite|formaction|action|content)\s*=\s*)
@@ -369,28 +390,28 @@ def get_static_links(self):
static_links = set()
hrefs = self._get_intrasite_link_regex()
for m in hrefs.finditer(self._content):
- what = m.group('what')
- value = urlparse(m.group('value'))
+ what = m.group("what")
+ value = urlparse(m.group("value"))
path = value.path
- if what not in {'static', 'attach'}:
+ if what not in {"static", "attach"}:
continue
- if path.startswith('/'):
+ if path.startswith("/"):
path = path[1:]
else:
# relative to the source path of this content
path = self.get_relative_source_path(
os.path.join(self.relative_dir, path)
)
- path = path.replace('%20', ' ')
+ path = path.replace("%20", " ")
static_links.add(path)
return static_links
def get_siteurl(self):
- return self._context.get('localsiteurl', '')
+ return self._context.get("localsiteurl", "")
@memoized
def get_content(self, siteurl):
- if hasattr(self, '_get_content'):
+ if hasattr(self, "_get_content"):
content = self._get_content()
else:
content = self._content
@@ -407,15 +428,17 @@ def get_summary(self, siteurl):
This is based on the summary metadata if set, otherwise truncate the
content.
"""
- if 'summary' in self.metadata:
- return self.metadata['summary']
+ if "summary" in self.metadata:
+ return self.metadata["summary"]
- if self.settings['SUMMARY_MAX_LENGTH'] is None:
+ if self.settings["SUMMARY_MAX_LENGTH"] is None:
return self.content
- return truncate_html_words(self.content,
- self.settings['SUMMARY_MAX_LENGTH'],
- self.settings['SUMMARY_END_SUFFIX'])
+ return truncate_html_words(
+ self.content,
+ self.settings["SUMMARY_MAX_LENGTH"],
+ self.settings["SUMMARY_END_SUFFIX"],
+ )
@property
def summary(self):
@@ -424,8 +447,10 @@ def summary(self):
def _get_summary(self):
"""deprecated function to access summary"""
- logger.warning('_get_summary() has been deprecated since 3.6.4. '
- 'Use the summary decorator instead')
+ logger.warning(
+ "_get_summary() has been deprecated since 3.6.4. "
+ "Use the summary decorator instead"
+ )
return self.summary
@summary.setter
@@ -444,14 +469,14 @@ def status(self, value):
@property
def url(self):
- return self.get_url_setting('url')
+ return self.get_url_setting("url")
@property
def save_as(self):
- return self.get_url_setting('save_as')
+ return self.get_url_setting("save_as")
def _get_template(self):
- if hasattr(self, 'template') and self.template is not None:
+ if hasattr(self, "template") and self.template is not None:
return self.template
else:
return self.default_template
@@ -470,11 +495,10 @@ def get_relative_source_path(self, source_path=None):
return posixize_path(
os.path.relpath(
- os.path.abspath(os.path.join(
- self.settings['PATH'],
- source_path)),
- os.path.abspath(self.settings['PATH'])
- ))
+ os.path.abspath(os.path.join(self.settings["PATH"], source_path)),
+ os.path.abspath(self.settings["PATH"]),
+ )
+ )
@property
def relative_dir(self):
@@ -482,85 +506,84 @@ def relative_dir(self):
os.path.dirname(
os.path.relpath(
os.path.abspath(self.source_path),
- os.path.abspath(self.settings['PATH']))))
+ os.path.abspath(self.settings["PATH"]),
+ )
+ )
+ )
def refresh_metadata_intersite_links(self):
- for key in self.settings['FORMATTED_FIELDS']:
- if key in self.metadata and key != 'summary':
- value = self._update_content(
- self.metadata[key],
- self.get_siteurl()
- )
+ for key in self.settings["FORMATTED_FIELDS"]:
+ if key in self.metadata and key != "summary":
+ value = self._update_content(self.metadata[key], self.get_siteurl())
self.metadata[key] = value
setattr(self, key.lower(), value)
# _summary is an internal variable that some plugins may be writing to,
# so ensure changes to it are picked up
- if ('summary' in self.settings['FORMATTED_FIELDS'] and
- 'summary' in self.metadata):
- self._summary = self._update_content(
- self._summary,
- self.get_siteurl()
- )
- self.metadata['summary'] = self._summary
+ if (
+ "summary" in self.settings["FORMATTED_FIELDS"]
+ and "summary" in self.metadata
+ ):
+ self._summary = self._update_content(self._summary, self.get_siteurl())
+ self.metadata["summary"] = self._summary
class Page(Content):
- mandatory_properties = ('title',)
- allowed_statuses = ('published', 'hidden', 'draft')
- default_status = 'published'
- default_template = 'page'
+ mandatory_properties = ("title",)
+ allowed_statuses = ("published", "hidden", "draft")
+ default_status = "published"
+ default_template = "page"
def _expand_settings(self, key):
- klass = 'draft_page' if self.status == 'draft' else None
+ klass = "draft_page" if self.status == "draft" else None
return super()._expand_settings(key, klass)
class Article(Content):
- mandatory_properties = ('title', 'date', 'category')
- allowed_statuses = ('published', 'hidden', 'draft')
- default_status = 'published'
- default_template = 'article'
+ mandatory_properties = ("title", "date", "category")
+ allowed_statuses = ("published", "hidden", "draft")
+ default_status = "published"
+ default_template = "article"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# handle WITH_FUTURE_DATES (designate article to draft based on date)
- if not self.settings['WITH_FUTURE_DATES'] and hasattr(self, 'date'):
+ if not self.settings["WITH_FUTURE_DATES"] and hasattr(self, "date"):
if self.date.tzinfo is None:
now = datetime.datetime.now()
else:
now = datetime.datetime.utcnow().replace(tzinfo=timezone.utc)
if self.date > now:
- self.status = 'draft'
+ self.status = "draft"
# if we are a draft and there is no date provided, set max datetime
- if not hasattr(self, 'date') and self.status == 'draft':
+ if not hasattr(self, "date") and self.status == "draft":
self.date = datetime.datetime.max.replace(tzinfo=self.timezone)
def _expand_settings(self, key):
- klass = 'draft' if self.status == 'draft' else 'article'
+ klass = "draft" if self.status == "draft" else "article"
return super()._expand_settings(key, klass)
class Static(Content):
- mandatory_properties = ('title',)
- default_status = 'published'
+ mandatory_properties = ("title",)
+ default_status = "published"
default_template = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._output_location_referenced = False
- @deprecated_attribute(old='filepath', new='source_path', since=(3, 2, 0))
+ @deprecated_attribute(old="filepath", new="source_path", since=(3, 2, 0))
def filepath():
return None
- @deprecated_attribute(old='src', new='source_path', since=(3, 2, 0))
+ @deprecated_attribute(old="src", new="source_path", since=(3, 2, 0))
def src():
return None
- @deprecated_attribute(old='dst', new='save_as', since=(3, 2, 0))
+ @deprecated_attribute(old="dst", new="save_as", since=(3, 2, 0))
def dst():
return None
@@ -577,8 +600,7 @@ def save_as(self):
return super().save_as
def attach_to(self, content):
- """Override our output directory with that of the given content object.
- """
+ """Override our output directory with that of the given content object."""
# Determine our file's new output path relative to the linking
# document. If it currently lives beneath the linking
@@ -589,8 +611,7 @@ def attach_to(self, content):
tail_path = os.path.relpath(self.source_path, linking_source_dir)
if tail_path.startswith(os.pardir + os.sep):
tail_path = os.path.basename(tail_path)
- new_save_as = os.path.join(
- os.path.dirname(content.save_as), tail_path)
+ new_save_as = os.path.join(os.path.dirname(content.save_as), tail_path)
# We do not build our new url by joining tail_path with the linking
# document's url, because we cannot know just by looking at the latter
@@ -609,12 +630,14 @@ def _log_reason(reason):
"%s because %s. Falling back to "
"{filename} link behavior instead.",
content.get_relative_source_path(),
- self.get_relative_source_path(), reason,
- extra={'limit_msg': "More {attach} warnings silenced."})
+ self.get_relative_source_path(),
+ reason,
+ extra={"limit_msg": "More {attach} warnings silenced."},
+ )
# We never override an override, because we don't want to interfere
# with user-defined overrides that might be in EXTRA_PATH_METADATA.
- if hasattr(self, 'override_save_as') or hasattr(self, 'override_url'):
+ if hasattr(self, "override_save_as") or hasattr(self, "override_url"):
if new_save_as != self.save_as or new_url != self.url:
_log_reason("its output location was already overridden")
return
diff --git a/pelican/generators.py b/pelican/generators.py
--- a/pelican/generators.py
+++ b/pelican/generators.py
@@ -8,15 +8,27 @@
from itertools import chain, groupby
from operator import attrgetter
-from jinja2 import (BaseLoader, ChoiceLoader, Environment, FileSystemLoader,
- PrefixLoader, TemplateNotFound)
+from jinja2 import (
+ BaseLoader,
+ ChoiceLoader,
+ Environment,
+ FileSystemLoader,
+ PrefixLoader,
+ TemplateNotFound,
+)
from pelican.cache import FileStampDataCacher
from pelican.contents import Article, Page, Static
from pelican.plugins import signals
from pelican.readers import Readers
-from pelican.utils import (DateFormatter, copy, mkdir_p, order_content,
- posixize_path, process_translations)
+from pelican.utils import (
+ DateFormatter,
+ copy,
+ mkdir_p,
+ order_content,
+ posixize_path,
+ process_translations,
+)
logger = logging.getLogger(__name__)
@@ -28,8 +40,16 @@ class PelicanTemplateNotFound(Exception):
class Generator:
"""Baseclass generator"""
- def __init__(self, context, settings, path, theme, output_path,
- readers_cache_name='', **kwargs):
+ def __init__(
+ self,
+ context,
+ settings,
+ path,
+ theme,
+ output_path,
+ readers_cache_name="",
+ **kwargs,
+ ):
self.context = context
self.settings = settings
self.path = path
@@ -43,44 +63,45 @@ def __init__(self, context, settings, path, theme, output_path,
# templates cache
self._templates = {}
- self._templates_path = list(self.settings['THEME_TEMPLATES_OVERRIDES'])
+ self._templates_path = list(self.settings["THEME_TEMPLATES_OVERRIDES"])
- theme_templates_path = os.path.expanduser(
- os.path.join(self.theme, 'templates'))
+ theme_templates_path = os.path.expanduser(os.path.join(self.theme, "templates"))
self._templates_path.append(theme_templates_path)
theme_loader = FileSystemLoader(theme_templates_path)
simple_theme_path = os.path.dirname(os.path.abspath(__file__))
simple_loader = FileSystemLoader(
- os.path.join(simple_theme_path, "themes", "simple", "templates"))
+ os.path.join(simple_theme_path, "themes", "simple", "templates")
+ )
self.env = Environment(
- loader=ChoiceLoader([
- FileSystemLoader(self._templates_path),
- simple_loader, # implicit inheritance
- PrefixLoader({
- '!simple': simple_loader,
- '!theme': theme_loader
- }) # explicit ones
- ]),
- **self.settings['JINJA_ENVIRONMENT']
+ loader=ChoiceLoader(
+ [
+ FileSystemLoader(self._templates_path),
+ simple_loader, # implicit inheritance
+ PrefixLoader(
+ {"!simple": simple_loader, "!theme": theme_loader}
+ ), # explicit ones
+ ]
+ ),
+ **self.settings["JINJA_ENVIRONMENT"],
)
- logger.debug('Template list: %s', self.env.list_templates())
+ logger.debug("Template list: %s", self.env.list_templates())
# provide utils.strftime as a jinja filter
- self.env.filters.update({'strftime': DateFormatter()})
+ self.env.filters.update({"strftime": DateFormatter()})
# get custom Jinja filters from user settings
- custom_filters = self.settings['JINJA_FILTERS']
+ custom_filters = self.settings["JINJA_FILTERS"]
self.env.filters.update(custom_filters)
# get custom Jinja globals from user settings
- custom_globals = self.settings['JINJA_GLOBALS']
+ custom_globals = self.settings["JINJA_GLOBALS"]
self.env.globals.update(custom_globals)
# get custom Jinja tests from user settings
- custom_tests = self.settings['JINJA_TESTS']
+ custom_tests = self.settings["JINJA_TESTS"]
self.env.tests.update(custom_tests)
signals.generator_init.send(self)
@@ -91,7 +112,7 @@ def get_template(self, name):
templates ready to use with Jinja2.
"""
if name not in self._templates:
- for ext in self.settings['TEMPLATE_EXTENSIONS']:
+ for ext in self.settings["TEMPLATE_EXTENSIONS"]:
try:
self._templates[name] = self.env.get_template(name + ext)
break
@@ -100,9 +121,12 @@ def get_template(self, name):
if name not in self._templates:
raise PelicanTemplateNotFound(
- '[templates] unable to load {}[{}] from {}'.format(
- name, ', '.join(self.settings['TEMPLATE_EXTENSIONS']),
- self._templates_path))
+ "[templates] unable to load {}[{}] from {}".format(
+ name,
+ ", ".join(self.settings["TEMPLATE_EXTENSIONS"]),
+ self._templates_path,
+ )
+ )
return self._templates[name]
@@ -118,7 +142,7 @@ def _include_path(self, path, extensions=None):
basename = os.path.basename(path)
# check IGNORE_FILES
- ignores = self.settings['IGNORE_FILES']
+ ignores = self.settings["IGNORE_FILES"]
if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
return False
@@ -147,20 +171,21 @@ def get_files(self, paths, exclude=[], extensions=None):
exclusions_by_dirpath.setdefault(parent_path, set()).add(subdir)
files = set()
- ignores = self.settings['IGNORE_FILES']
+ ignores = self.settings["IGNORE_FILES"]
for path in paths:
# careful: os.path.join() will add a slash when path == ''.
root = os.path.join(self.path, path) if path else self.path
if os.path.isdir(root):
for dirpath, dirs, temp_files in os.walk(
- root, topdown=True, followlinks=True):
+ root, topdown=True, followlinks=True
+ ):
excl = exclusions_by_dirpath.get(dirpath, ())
# We copy the `dirs` list as we will modify it in the loop:
for d in list(dirs):
- if (d in excl or
- any(fnmatch.fnmatch(d, ignore)
- for ignore in ignores)):
+ if d in excl or any(
+ fnmatch.fnmatch(d, ignore) for ignore in ignores
+ ):
if d in dirs:
dirs.remove(d)
@@ -178,7 +203,7 @@ def add_source_path(self, content, static=False):
Store a reference to its Content object, for url lookups later.
"""
location = content.get_relative_source_path()
- key = 'static_content' if static else 'generated_content'
+ key = "static_content" if static else "generated_content"
self.context[key][location] = content
def _add_failed_source_path(self, path, static=False):
@@ -186,7 +211,7 @@ def _add_failed_source_path(self, path, static=False):
(For example, one that was missing mandatory metadata.)
The path argument is expected to be relative to self.path.
"""
- key = 'static_content' if static else 'generated_content'
+ key = "static_content" if static else "generated_content"
self.context[key][posixize_path(os.path.normpath(path))] = None
def _is_potential_source_path(self, path, static=False):
@@ -195,14 +220,14 @@ def _is_potential_source_path(self, path, static=False):
before this method is called, even if they failed to process.)
The path argument is expected to be relative to self.path.
"""
- key = 'static_content' if static else 'generated_content'
- return (posixize_path(os.path.normpath(path)) in self.context[key])
+ key = "static_content" if static else "generated_content"
+ return posixize_path(os.path.normpath(path)) in self.context[key]
def add_static_links(self, content):
"""Add file links in content to context to be processed as Static
content.
"""
- self.context['static_links'] |= content.get_static_links()
+ self.context["static_links"] |= content.get_static_links()
def _update_context(self, items):
"""Update the context with the given items from the current processor.
@@ -211,7 +236,7 @@ def _update_context(self, items):
"""
for item in items:
value = getattr(self, item)
- if hasattr(value, 'items'):
+ if hasattr(value, "items"):
value = list(value.items()) # py3k safeguard for iterators
self.context[item] = value
@@ -221,37 +246,35 @@ def __str__(self):
class CachingGenerator(Generator, FileStampDataCacher):
- '''Subclass of Generator and FileStampDataCacher classes
+ """Subclass of Generator and FileStampDataCacher classes
enables content caching, either at the generator or reader level
- '''
+ """
def __init__(self, *args, **kwargs):
- '''Initialize the generator, then set up caching
+ """Initialize the generator, then set up caching
note the multiple inheritance structure
- '''
+ """
cls_name = self.__class__.__name__
- Generator.__init__(self, *args,
- readers_cache_name=(cls_name + '-Readers'),
- **kwargs)
-
- cache_this_level = \
- self.settings['CONTENT_CACHING_LAYER'] == 'generator'
- caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
- load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
- FileStampDataCacher.__init__(self, self.settings, cls_name,
- caching_policy, load_policy
- )
+ Generator.__init__(
+ self, *args, readers_cache_name=(cls_name + "-Readers"), **kwargs
+ )
+
+ cache_this_level = self.settings["CONTENT_CACHING_LAYER"] == "generator"
+ caching_policy = cache_this_level and self.settings["CACHE_CONTENT"]
+ load_policy = cache_this_level and self.settings["LOAD_CONTENT_CACHE"]
+ FileStampDataCacher.__init__(
+ self, self.settings, cls_name, caching_policy, load_policy
+ )
def _get_file_stamp(self, filename):
- '''Get filestamp for path relative to generator.path'''
+ """Get filestamp for path relative to generator.path"""
filename = os.path.join(self.path, filename)
return super()._get_file_stamp(filename)
class _FileLoader(BaseLoader):
-
def __init__(self, path, basedir):
self.path = path
self.fullpath = os.path.join(basedir, path)
@@ -260,22 +283,21 @@ def get_source(self, environment, template):
if template != self.path or not os.path.exists(self.fullpath):
raise TemplateNotFound(template)
mtime = os.path.getmtime(self.fullpath)
- with open(self.fullpath, encoding='utf-8') as f:
+ with open(self.fullpath, encoding="utf-8") as f:
source = f.read()
- return (source, self.fullpath,
- lambda: mtime == os.path.getmtime(self.fullpath))
+ return (source, self.fullpath, lambda: mtime == os.path.getmtime(self.fullpath))
class TemplatePagesGenerator(Generator):
-
def generate_output(self, writer):
- for source, dest in self.settings['TEMPLATE_PAGES'].items():
+ for source, dest in self.settings["TEMPLATE_PAGES"].items():
self.env.loader.loaders.insert(0, _FileLoader(source, self.path))
try:
template = self.env.get_template(source)
- rurls = self.settings['RELATIVE_URLS']
- writer.write_file(dest, template, self.context, rurls,
- override_output=True, url='')
+ rurls = self.settings["RELATIVE_URLS"]
+ writer.write_file(
+ dest, template, self.context, rurls, override_output=True, url=""
+ )
finally:
del self.env.loader.loaders[0]
@@ -286,13 +308,13 @@ class ArticlesGenerator(CachingGenerator):
def __init__(self, *args, **kwargs):
"""initialize properties"""
# Published, listed articles
- self.articles = [] # only articles in default language
+ self.articles = [] # only articles in default language
self.translations = []
# Published, unlisted articles
self.hidden_articles = []
self.hidden_translations = []
# Draft articles
- self.drafts = [] # only drafts in default language
+ self.drafts = [] # only drafts in default language
self.drafts_translations = []
self.dates = {}
self.period_archives = defaultdict(list)
@@ -306,263 +328,304 @@ def __init__(self, *args, **kwargs):
def generate_feeds(self, writer):
"""Generate the feeds from the current context, and output files."""
- if self.settings.get('FEED_ATOM'):
+ if self.settings.get("FEED_ATOM"):
writer.write_feed(
self.articles,
self.context,
- self.settings['FEED_ATOM'],
- self.settings.get('FEED_ATOM_URL', self.settings['FEED_ATOM'])
- )
+ self.settings["FEED_ATOM"],
+ self.settings.get("FEED_ATOM_URL", self.settings["FEED_ATOM"]),
+ )
- if self.settings.get('FEED_RSS'):
+ if self.settings.get("FEED_RSS"):
writer.write_feed(
self.articles,
self.context,
- self.settings['FEED_RSS'],
- self.settings.get('FEED_RSS_URL', self.settings['FEED_RSS']),
- feed_type='rss'
- )
+ self.settings["FEED_RSS"],
+ self.settings.get("FEED_RSS_URL", self.settings["FEED_RSS"]),
+ feed_type="rss",
+ )
- if (self.settings.get('FEED_ALL_ATOM') or
- self.settings.get('FEED_ALL_RSS')):
+ if self.settings.get("FEED_ALL_ATOM") or self.settings.get("FEED_ALL_RSS"):
all_articles = list(self.articles)
for article in self.articles:
all_articles.extend(article.translations)
- order_content(
- all_articles, order_by=self.settings['ARTICLE_ORDER_BY']
- )
+ order_content(all_articles, order_by=self.settings["ARTICLE_ORDER_BY"])
- if self.settings.get('FEED_ALL_ATOM'):
+ if self.settings.get("FEED_ALL_ATOM"):
writer.write_feed(
all_articles,
self.context,
- self.settings['FEED_ALL_ATOM'],
- self.settings.get('FEED_ALL_ATOM_URL',
- self.settings['FEED_ALL_ATOM'])
- )
+ self.settings["FEED_ALL_ATOM"],
+ self.settings.get(
+ "FEED_ALL_ATOM_URL", self.settings["FEED_ALL_ATOM"]
+ ),
+ )
- if self.settings.get('FEED_ALL_RSS'):
+ if self.settings.get("FEED_ALL_RSS"):
writer.write_feed(
all_articles,
self.context,
- self.settings['FEED_ALL_RSS'],
- self.settings.get('FEED_ALL_RSS_URL',
- self.settings['FEED_ALL_RSS']),
- feed_type='rss'
- )
+ self.settings["FEED_ALL_RSS"],
+ self.settings.get(
+ "FEED_ALL_RSS_URL", self.settings["FEED_ALL_RSS"]
+ ),
+ feed_type="rss",
+ )
for cat, arts in self.categories:
- if self.settings.get('CATEGORY_FEED_ATOM'):
+ if self.settings.get("CATEGORY_FEED_ATOM"):
writer.write_feed(
arts,
self.context,
- str(self.settings['CATEGORY_FEED_ATOM']).format(slug=cat.slug),
+ str(self.settings["CATEGORY_FEED_ATOM"]).format(slug=cat.slug),
self.settings.get(
- 'CATEGORY_FEED_ATOM_URL',
- str(self.settings['CATEGORY_FEED_ATOM']).format(
- slug=cat.slug
- )),
- feed_title=cat.name
- )
+ "CATEGORY_FEED_ATOM_URL",
+ str(self.settings["CATEGORY_FEED_ATOM"]).format(slug=cat.slug),
+ ),
+ feed_title=cat.name,
+ )
- if self.settings.get('CATEGORY_FEED_RSS'):
+ if self.settings.get("CATEGORY_FEED_RSS"):
writer.write_feed(
arts,
self.context,
- str(self.settings['CATEGORY_FEED_RSS']).format(slug=cat.slug),
+ str(self.settings["CATEGORY_FEED_RSS"]).format(slug=cat.slug),
self.settings.get(
- 'CATEGORY_FEED_RSS_URL',
- str(self.settings['CATEGORY_FEED_RSS']).format(
- slug=cat.slug
- )),
+ "CATEGORY_FEED_RSS_URL",
+ str(self.settings["CATEGORY_FEED_RSS"]).format(slug=cat.slug),
+ ),
feed_title=cat.name,
- feed_type='rss'
- )
+ feed_type="rss",
+ )
for auth, arts in self.authors:
- if self.settings.get('AUTHOR_FEED_ATOM'):
+ if self.settings.get("AUTHOR_FEED_ATOM"):
writer.write_feed(
arts,
self.context,
- str(self.settings['AUTHOR_FEED_ATOM']).format(slug=auth.slug),
+ str(self.settings["AUTHOR_FEED_ATOM"]).format(slug=auth.slug),
self.settings.get(
- 'AUTHOR_FEED_ATOM_URL',
- str(self.settings['AUTHOR_FEED_ATOM']).format(
- slug=auth.slug
- )),
- feed_title=auth.name
- )
+ "AUTHOR_FEED_ATOM_URL",
+ str(self.settings["AUTHOR_FEED_ATOM"]).format(slug=auth.slug),
+ ),
+ feed_title=auth.name,
+ )
- if self.settings.get('AUTHOR_FEED_RSS'):
+ if self.settings.get("AUTHOR_FEED_RSS"):
writer.write_feed(
arts,
self.context,
- str(self.settings['AUTHOR_FEED_RSS']).format(slug=auth.slug),
+ str(self.settings["AUTHOR_FEED_RSS"]).format(slug=auth.slug),
self.settings.get(
- 'AUTHOR_FEED_RSS_URL',
- str(self.settings['AUTHOR_FEED_RSS']).format(
- slug=auth.slug
- )),
+ "AUTHOR_FEED_RSS_URL",
+ str(self.settings["AUTHOR_FEED_RSS"]).format(slug=auth.slug),
+ ),
feed_title=auth.name,
- feed_type='rss'
- )
+ feed_type="rss",
+ )
- if (self.settings.get('TAG_FEED_ATOM') or
- self.settings.get('TAG_FEED_RSS')):
+ if self.settings.get("TAG_FEED_ATOM") or self.settings.get("TAG_FEED_RSS"):
for tag, arts in self.tags.items():
- if self.settings.get('TAG_FEED_ATOM'):
+ if self.settings.get("TAG_FEED_ATOM"):
writer.write_feed(
arts,
self.context,
- str(self.settings['TAG_FEED_ATOM']).format(slug=tag.slug),
+ str(self.settings["TAG_FEED_ATOM"]).format(slug=tag.slug),
self.settings.get(
- 'TAG_FEED_ATOM_URL',
- str(self.settings['TAG_FEED_ATOM']).format(
- slug=tag.slug
- )),
- feed_title=tag.name
- )
-
- if self.settings.get('TAG_FEED_RSS'):
+ "TAG_FEED_ATOM_URL",
+ str(self.settings["TAG_FEED_ATOM"]).format(slug=tag.slug),
+ ),
+ feed_title=tag.name,
+ )
+
+ if self.settings.get("TAG_FEED_RSS"):
writer.write_feed(
arts,
self.context,
- str(self.settings['TAG_FEED_RSS']).format(slug=tag.slug),
+ str(self.settings["TAG_FEED_RSS"]).format(slug=tag.slug),
self.settings.get(
- 'TAG_FEED_RSS_URL',
- str(self.settings['TAG_FEED_RSS']).format(
- slug=tag.slug
- )),
+ "TAG_FEED_RSS_URL",
+ str(self.settings["TAG_FEED_RSS"]).format(slug=tag.slug),
+ ),
feed_title=tag.name,
- feed_type='rss'
- )
+ feed_type="rss",
+ )
- if (self.settings.get('TRANSLATION_FEED_ATOM') or
- self.settings.get('TRANSLATION_FEED_RSS')):
+ if self.settings.get("TRANSLATION_FEED_ATOM") or self.settings.get(
+ "TRANSLATION_FEED_RSS"
+ ):
translations_feeds = defaultdict(list)
for article in chain(self.articles, self.translations):
translations_feeds[article.lang].append(article)
for lang, items in translations_feeds.items():
- items = order_content(
- items, order_by=self.settings['ARTICLE_ORDER_BY'])
- if self.settings.get('TRANSLATION_FEED_ATOM'):
+ items = order_content(items, order_by=self.settings["ARTICLE_ORDER_BY"])
+ if self.settings.get("TRANSLATION_FEED_ATOM"):
writer.write_feed(
items,
self.context,
- str(
- self.settings['TRANSLATION_FEED_ATOM']
- ).format(lang=lang),
+ str(self.settings["TRANSLATION_FEED_ATOM"]).format(lang=lang),
self.settings.get(
- 'TRANSLATION_FEED_ATOM_URL',
- str(
- self.settings['TRANSLATION_FEED_ATOM']
- ).format(lang=lang),
- )
+ "TRANSLATION_FEED_ATOM_URL",
+ str(self.settings["TRANSLATION_FEED_ATOM"]).format(
+ lang=lang
+ ),
+ ),
)
- if self.settings.get('TRANSLATION_FEED_RSS'):
+ if self.settings.get("TRANSLATION_FEED_RSS"):
writer.write_feed(
items,
self.context,
- str(
- self.settings['TRANSLATION_FEED_RSS']
- ).format(lang=lang),
+ str(self.settings["TRANSLATION_FEED_RSS"]).format(lang=lang),
self.settings.get(
- 'TRANSLATION_FEED_RSS_URL',
- str(self.settings['TRANSLATION_FEED_RSS'])).format(
- lang=lang
- ),
- feed_type='rss'
+ "TRANSLATION_FEED_RSS_URL",
+ str(self.settings["TRANSLATION_FEED_RSS"]),
+ ).format(lang=lang),
+ feed_type="rss",
)
def generate_articles(self, write):
"""Generate the articles."""
for article in chain(
- self.translations, self.articles,
- self.hidden_translations, self.hidden_articles
+ self.translations,
+ self.articles,
+ self.hidden_translations,
+ self.hidden_articles,
):
signals.article_generator_write_article.send(self, content=article)
- write(article.save_as, self.get_template(article.template),
- self.context, article=article, category=article.category,
- override_output=hasattr(article, 'override_save_as'),
- url=article.url, blog=True)
+ write(
+ article.save_as,
+ self.get_template(article.template),
+ self.context,
+ article=article,
+ category=article.category,
+ override_output=hasattr(article, "override_save_as"),
+ url=article.url,
+ blog=True,
+ )
def generate_period_archives(self, write):
"""Generate per-year, per-month, and per-day archives."""
try:
- template = self.get_template('period_archives')
+ template = self.get_template("period_archives")
except PelicanTemplateNotFound:
- template = self.get_template('archives')
+ template = self.get_template("archives")
for granularity in self.period_archives:
for period in self.period_archives[granularity]:
-
context = self.context.copy()
- context['period'] = period['period']
- context['period_num'] = period['period_num']
-
- write(period['save_as'], template, context,
- articles=period['articles'], dates=period['dates'],
- template_name='period_archives', blog=True,
- url=period['url'], all_articles=self.articles)
+ context["period"] = period["period"]
+ context["period_num"] = period["period_num"]
+
+ write(
+ period["save_as"],
+ template,
+ context,
+ articles=period["articles"],
+ dates=period["dates"],
+ template_name="period_archives",
+ blog=True,
+ url=period["url"],
+ all_articles=self.articles,
+ )
def generate_direct_templates(self, write):
"""Generate direct templates pages"""
- for template in self.settings['DIRECT_TEMPLATES']:
- save_as = self.settings.get("%s_SAVE_AS" % template.upper(),
- '%s.html' % template)
- url = self.settings.get("%s_URL" % template.upper(),
- '%s.html' % template)
+ for template in self.settings["DIRECT_TEMPLATES"]:
+ save_as = self.settings.get(
+ "%s_SAVE_AS" % template.upper(), "%s.html" % template
+ )
+ url = self.settings.get("%s_URL" % template.upper(), "%s.html" % template)
if not save_as:
continue
- write(save_as, self.get_template(template), self.context,
- articles=self.articles, dates=self.dates, blog=True,
- template_name=template,
- page_name=os.path.splitext(save_as)[0], url=url)
+ write(
+ save_as,
+ self.get_template(template),
+ self.context,
+ articles=self.articles,
+ dates=self.dates,
+ blog=True,
+ template_name=template,
+ page_name=os.path.splitext(save_as)[0],
+ url=url,
+ )
def generate_tags(self, write):
"""Generate Tags pages."""
- tag_template = self.get_template('tag')
+ tag_template = self.get_template("tag")
for tag, articles in self.tags.items():
dates = [article for article in self.dates if article in articles]
- write(tag.save_as, tag_template, self.context, tag=tag,
- url=tag.url, articles=articles, dates=dates,
- template_name='tag', blog=True, page_name=tag.page_name,
- all_articles=self.articles)
+ write(
+ tag.save_as,
+ tag_template,
+ self.context,
+ tag=tag,
+ url=tag.url,
+ articles=articles,
+ dates=dates,
+ template_name="tag",
+ blog=True,
+ page_name=tag.page_name,
+ all_articles=self.articles,
+ )
def generate_categories(self, write):
"""Generate category pages."""
- category_template = self.get_template('category')
+ category_template = self.get_template("category")
for cat, articles in self.categories:
dates = [article for article in self.dates if article in articles]
- write(cat.save_as, category_template, self.context, url=cat.url,
- category=cat, articles=articles, dates=dates,
- template_name='category', blog=True, page_name=cat.page_name,
- all_articles=self.articles)
+ write(
+ cat.save_as,
+ category_template,
+ self.context,
+ url=cat.url,
+ category=cat,
+ articles=articles,
+ dates=dates,
+ template_name="category",
+ blog=True,
+ page_name=cat.page_name,
+ all_articles=self.articles,
+ )
def generate_authors(self, write):
"""Generate Author pages."""
- author_template = self.get_template('author')
+ author_template = self.get_template("author")
for aut, articles in self.authors:
dates = [article for article in self.dates if article in articles]
- write(aut.save_as, author_template, self.context,
- url=aut.url, author=aut, articles=articles, dates=dates,
- template_name='author', blog=True,
- page_name=aut.page_name, all_articles=self.articles)
+ write(
+ aut.save_as,
+ author_template,
+ self.context,
+ url=aut.url,
+ author=aut,
+ articles=articles,
+ dates=dates,
+ template_name="author",
+ blog=True,
+ page_name=aut.page_name,
+ all_articles=self.articles,
+ )
def generate_drafts(self, write):
"""Generate drafts pages."""
for draft in chain(self.drafts_translations, self.drafts):
- write(draft.save_as, self.get_template(draft.template),
- self.context, article=draft, category=draft.category,
- override_output=hasattr(draft, 'override_save_as'),
- blog=True, all_articles=self.articles, url=draft.url)
+ write(
+ draft.save_as,
+ self.get_template(draft.template),
+ self.context,
+ article=draft,
+ category=draft.category,
+ override_output=hasattr(draft, "override_save_as"),
+ blog=True,
+ all_articles=self.articles,
+ url=draft.url,
+ )
def generate_pages(self, writer):
"""Generate the pages on the disk"""
- write = partial(writer.write_file,
- relative_urls=self.settings['RELATIVE_URLS'])
+ write = partial(writer.write_file, relative_urls=self.settings["RELATIVE_URLS"])
# to minimize the number of relative path stuff modification
# in writer, articles pass first
@@ -583,22 +646,28 @@ def generate_context(self):
all_drafts = []
hidden_articles = []
for f in self.get_files(
- self.settings['ARTICLE_PATHS'],
- exclude=self.settings['ARTICLE_EXCLUDES']):
+ self.settings["ARTICLE_PATHS"], exclude=self.settings["ARTICLE_EXCLUDES"]
+ ):
article = self.get_cached_data(f, None)
if article is None:
try:
article = self.readers.read_file(
- base_path=self.path, path=f, content_class=Article,
+ base_path=self.path,
+ path=f,
+ content_class=Article,
context=self.context,
preread_signal=signals.article_generator_preread,
preread_sender=self,
context_signal=signals.article_generator_context,
- context_sender=self)
+ context_sender=self,
+ )
except Exception as e:
logger.error(
- 'Could not process %s\n%s', f, e,
- exc_info=self.settings.get('DEBUG', False))
+ "Could not process %s\n%s",
+ f,
+ e,
+ exc_info=self.settings.get("DEBUG", False),
+ )
self._add_failed_source_path(f)
continue
@@ -620,8 +689,9 @@ def generate_context(self):
def _process(arts):
origs, translations = process_translations(
- arts, translation_id=self.settings['ARTICLE_TRANSLATION_ID'])
- origs = order_content(origs, self.settings['ARTICLE_ORDER_BY'])
+ arts, translation_id=self.settings["ARTICLE_TRANSLATION_ID"]
+ )
+ origs = order_content(origs, self.settings["ARTICLE_ORDER_BY"])
return origs, translations
self.articles, self.translations = _process(all_articles)
@@ -634,36 +704,45 @@ def _process(arts):
# only main articles are listed in categories and tags
# not translations or hidden articles
self.categories[article.category].append(article)
- if hasattr(article, 'tags'):
+ if hasattr(article, "tags"):
for tag in article.tags:
self.tags[tag].append(article)
- for author in getattr(article, 'authors', []):
+ for author in getattr(article, "authors", []):
self.authors[author].append(article)
self.dates = list(self.articles)
- self.dates.sort(key=attrgetter('date'),
- reverse=self.context['NEWEST_FIRST_ARCHIVES'])
+ self.dates.sort(
+ key=attrgetter("date"), reverse=self.context["NEWEST_FIRST_ARCHIVES"]
+ )
self.period_archives = self._build_period_archives(
- self.dates, self.articles, self.settings)
+ self.dates, self.articles, self.settings
+ )
# and generate the output :)
# order the categories per name
self.categories = list(self.categories.items())
- self.categories.sort(
- reverse=self.settings['REVERSE_CATEGORY_ORDER'])
+ self.categories.sort(reverse=self.settings["REVERSE_CATEGORY_ORDER"])
self.authors = list(self.authors.items())
self.authors.sort()
- self._update_context((
- 'articles', 'drafts', 'hidden_articles',
- 'dates', 'tags', 'categories',
- 'authors', 'related_posts'))
+ self._update_context(
+ (
+ "articles",
+ "drafts",
+ "hidden_articles",
+ "dates",
+ "tags",
+ "categories",
+ "authors",
+ "related_posts",
+ )
+ )
# _update_context flattens dicts, which should not happen to
# period_archives, so we update the context directly for it:
- self.context['period_archives'] = self.period_archives
+ self.context["period_archives"] = self.period_archives
self.save_cache()
self.readers.save_cache()
signals.article_generator_finalized.send(self)
@@ -677,29 +756,29 @@ def _build_period_archives(self, sorted_articles, articles, settings):
period_archives = defaultdict(list)
period_archives_settings = {
- 'year': {
- 'save_as': settings['YEAR_ARCHIVE_SAVE_AS'],
- 'url': settings['YEAR_ARCHIVE_URL'],
+ "year": {
+ "save_as": settings["YEAR_ARCHIVE_SAVE_AS"],
+ "url": settings["YEAR_ARCHIVE_URL"],
},
- 'month': {
- 'save_as': settings['MONTH_ARCHIVE_SAVE_AS'],
- 'url': settings['MONTH_ARCHIVE_URL'],
+ "month": {
+ "save_as": settings["MONTH_ARCHIVE_SAVE_AS"],
+ "url": settings["MONTH_ARCHIVE_URL"],
},
- 'day': {
- 'save_as': settings['DAY_ARCHIVE_SAVE_AS'],
- 'url': settings['DAY_ARCHIVE_URL'],
+ "day": {
+ "save_as": settings["DAY_ARCHIVE_SAVE_AS"],
+ "url": settings["DAY_ARCHIVE_URL"],
},
}
granularity_key_func = {
- 'year': attrgetter('date.year'),
- 'month': attrgetter('date.year', 'date.month'),
- 'day': attrgetter('date.year', 'date.month', 'date.day'),
+ "year": attrgetter("date.year"),
+ "month": attrgetter("date.year", "date.month"),
+ "day": attrgetter("date.year", "date.month", "date.day"),
}
- for granularity in 'year', 'month', 'day':
- save_as_fmt = period_archives_settings[granularity]['save_as']
- url_fmt = period_archives_settings[granularity]['url']
+ for granularity in "year", "month", "day":
+ save_as_fmt = period_archives_settings[granularity]["save_as"]
+ url_fmt = period_archives_settings[granularity]["url"]
key_func = granularity_key_func[granularity]
if not save_as_fmt:
@@ -710,26 +789,26 @@ def _build_period_archives(self, sorted_articles, articles, settings):
archive = {}
dates = list(group)
- archive['dates'] = dates
- archive['articles'] = [a for a in articles if a in dates]
+ archive["dates"] = dates
+ archive["articles"] = [a for a in articles if a in dates]
# use the first date to specify the period archive URL
# and save_as; the specific date used does not matter as
# they all belong to the same period
d = dates[0].date
- archive['save_as'] = save_as_fmt.format(date=d)
- archive['url'] = url_fmt.format(date=d)
+ archive["save_as"] = save_as_fmt.format(date=d)
+ archive["url"] = url_fmt.format(date=d)
- if granularity == 'year':
- archive['period'] = (period,)
- archive['period_num'] = (period,)
+ if granularity == "year":
+ archive["period"] = (period,)
+ archive["period_num"] = (period,)
else:
month_name = calendar.month_name[period[1]]
- if granularity == 'month':
- archive['period'] = (period[0], month_name)
+ if granularity == "month":
+ archive["period"] = (period[0], month_name)
else:
- archive['period'] = (period[0], month_name, period[2])
- archive['period_num'] = tuple(period)
+ archive["period"] = (period[0], month_name, period[2])
+ archive["period_num"] = tuple(period)
period_archives[granularity].append(archive)
@@ -741,13 +820,15 @@ def generate_output(self, writer):
signals.article_writer_finalized.send(self, writer=writer)
def refresh_metadata_intersite_links(self):
- for e in chain(self.articles,
- self.translations,
- self.drafts,
- self.drafts_translations,
- self.hidden_articles,
- self.hidden_translations):
- if hasattr(e, 'refresh_metadata_intersite_links'):
+ for e in chain(
+ self.articles,
+ self.translations,
+ self.drafts,
+ self.drafts_translations,
+ self.hidden_articles,
+ self.hidden_translations,
+ ):
+ if hasattr(e, "refresh_metadata_intersite_links"):
e.refresh_metadata_intersite_links()
@@ -769,22 +850,28 @@ def generate_context(self):
hidden_pages = []
draft_pages = []
for f in self.get_files(
- self.settings['PAGE_PATHS'],
- exclude=self.settings['PAGE_EXCLUDES']):
+ self.settings["PAGE_PATHS"], exclude=self.settings["PAGE_EXCLUDES"]
+ ):
page = self.get_cached_data(f, None)
if page is None:
try:
page = self.readers.read_file(
- base_path=self.path, path=f, content_class=Page,
+ base_path=self.path,
+ path=f,
+ content_class=Page,
context=self.context,
preread_signal=signals.page_generator_preread,
preread_sender=self,
context_signal=signals.page_generator_context,
- context_sender=self)
+ context_sender=self,
+ )
except Exception as e:
logger.error(
- 'Could not process %s\n%s', f, e,
- exc_info=self.settings.get('DEBUG', False))
+ "Could not process %s\n%s",
+ f,
+ e,
+ exc_info=self.settings.get("DEBUG", False),
+ )
self._add_failed_source_path(f)
continue
@@ -805,40 +892,51 @@ def generate_context(self):
def _process(pages):
origs, translations = process_translations(
- pages, translation_id=self.settings['PAGE_TRANSLATION_ID'])
- origs = order_content(origs, self.settings['PAGE_ORDER_BY'])
+ pages, translation_id=self.settings["PAGE_TRANSLATION_ID"]
+ )
+ origs = order_content(origs, self.settings["PAGE_ORDER_BY"])
return origs, translations
self.pages, self.translations = _process(all_pages)
self.hidden_pages, self.hidden_translations = _process(hidden_pages)
self.draft_pages, self.draft_translations = _process(draft_pages)
- self._update_context(('pages', 'hidden_pages', 'draft_pages'))
+ self._update_context(("pages", "hidden_pages", "draft_pages"))
self.save_cache()
self.readers.save_cache()
signals.page_generator_finalized.send(self)
def generate_output(self, writer):
- for page in chain(self.translations, self.pages,
- self.hidden_translations, self.hidden_pages,
- self.draft_translations, self.draft_pages):
+ for page in chain(
+ self.translations,
+ self.pages,
+ self.hidden_translations,
+ self.hidden_pages,
+ self.draft_translations,
+ self.draft_pages,
+ ):
signals.page_generator_write_page.send(self, content=page)
writer.write_file(
- page.save_as, self.get_template(page.template),
- self.context, page=page,
- relative_urls=self.settings['RELATIVE_URLS'],
- override_output=hasattr(page, 'override_save_as'),
- url=page.url)
+ page.save_as,
+ self.get_template(page.template),
+ self.context,
+ page=page,
+ relative_urls=self.settings["RELATIVE_URLS"],
+ override_output=hasattr(page, "override_save_as"),
+ url=page.url,
+ )
signals.page_writer_finalized.send(self, writer=writer)
def refresh_metadata_intersite_links(self):
- for e in chain(self.pages,
- self.hidden_pages,
- self.hidden_translations,
- self.draft_pages,
- self.draft_translations):
- if hasattr(e, 'refresh_metadata_intersite_links'):
+ for e in chain(
+ self.pages,
+ self.hidden_pages,
+ self.hidden_translations,
+ self.draft_pages,
+ self.draft_translations,
+ ):
+ if hasattr(e, "refresh_metadata_intersite_links"):
e.refresh_metadata_intersite_links()
@@ -853,71 +951,82 @@ def __init__(self, *args, **kwargs):
def generate_context(self):
self.staticfiles = []
- linked_files = set(self.context['static_links'])
- found_files = self.get_files(self.settings['STATIC_PATHS'],
- exclude=self.settings['STATIC_EXCLUDES'],
- extensions=False)
+ linked_files = set(self.context["static_links"])
+ found_files = self.get_files(
+ self.settings["STATIC_PATHS"],
+ exclude=self.settings["STATIC_EXCLUDES"],
+ extensions=False,
+ )
for f in linked_files | found_files:
-
# skip content source files unless the user explicitly wants them
- if self.settings['STATIC_EXCLUDE_SOURCES']:
+ if self.settings["STATIC_EXCLUDE_SOURCES"]:
if self._is_potential_source_path(f):
continue
static = self.readers.read_file(
- base_path=self.path, path=f, content_class=Static,
- fmt='static', context=self.context,
+ base_path=self.path,
+ path=f,
+ content_class=Static,
+ fmt="static",
+ context=self.context,
preread_signal=signals.static_generator_preread,
preread_sender=self,
context_signal=signals.static_generator_context,
- context_sender=self)
+ context_sender=self,
+ )
self.staticfiles.append(static)
self.add_source_path(static, static=True)
- self._update_context(('staticfiles',))
+ self._update_context(("staticfiles",))
signals.static_generator_finalized.send(self)
def generate_output(self, writer):
- self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
- self.settings['THEME_STATIC_DIR'], self.output_path,
- os.curdir)
- for sc in self.context['staticfiles']:
+ self._copy_paths(
+ self.settings["THEME_STATIC_PATHS"],
+ self.theme,
+ self.settings["THEME_STATIC_DIR"],
+ self.output_path,
+ os.curdir,
+ )
+ for sc in self.context["staticfiles"]:
if self._file_update_required(sc):
self._link_or_copy_staticfile(sc)
else:
- logger.debug('%s is up to date, not copying', sc.source_path)
+ logger.debug("%s is up to date, not copying", sc.source_path)
- def _copy_paths(self, paths, source, destination, output_path,
- final_path=None):
+ def _copy_paths(self, paths, source, destination, output_path, final_path=None):
"""Copy all the paths from source to destination"""
for path in paths:
source_path = os.path.join(source, path)
if final_path:
if os.path.isfile(source_path):
- destination_path = os.path.join(output_path, destination,
- final_path,
- os.path.basename(path))
+ destination_path = os.path.join(
+ output_path, destination, final_path, os.path.basename(path)
+ )
else:
- destination_path = os.path.join(output_path, destination,
- final_path)
+ destination_path = os.path.join(
+ output_path, destination, final_path
+ )
else:
destination_path = os.path.join(output_path, destination, path)
- copy(source_path, destination_path,
- self.settings['IGNORE_FILES'])
+ copy(source_path, destination_path, self.settings["IGNORE_FILES"])
def _file_update_required(self, staticfile):
source_path = os.path.join(self.path, staticfile.source_path)
save_as = os.path.join(self.output_path, staticfile.save_as)
if not os.path.exists(save_as):
return True
- elif (self.settings['STATIC_CREATE_LINKS'] and
- os.path.samefile(source_path, save_as)):
+ elif self.settings["STATIC_CREATE_LINKS"] and os.path.samefile(
+ source_path, save_as
+ ):
return False
- elif (self.settings['STATIC_CREATE_LINKS'] and
- os.path.realpath(save_as) == source_path):
+ elif (
+ self.settings["STATIC_CREATE_LINKS"]
+ and os.path.realpath(save_as) == source_path
+ ):
return False
- elif not self.settings['STATIC_CHECK_IF_MODIFIED']:
+ elif not self.settings["STATIC_CHECK_IF_MODIFIED"]:
return True
else:
return self._source_is_newer(staticfile)
@@ -930,7 +1039,7 @@ def _source_is_newer(self, staticfile):
return s_mtime - d_mtime > 0.000001
def _link_or_copy_staticfile(self, sc):
- if self.settings['STATIC_CREATE_LINKS']:
+ if self.settings["STATIC_CREATE_LINKS"]:
self._link_staticfile(sc)
else:
self._copy_staticfile(sc)
@@ -940,7 +1049,7 @@ def _copy_staticfile(self, sc):
save_as = os.path.join(self.output_path, sc.save_as)
self._mkdir(os.path.dirname(save_as))
copy(source_path, save_as)
- logger.info('Copying %s to %s', sc.source_path, sc.save_as)
+ logger.info("Copying %s to %s", sc.source_path, sc.save_as)
def _link_staticfile(self, sc):
source_path = os.path.join(self.path, sc.source_path)
@@ -949,7 +1058,7 @@ def _link_staticfile(self, sc):
try:
if os.path.lexists(save_as):
os.unlink(save_as)
- logger.info('Linking %s and %s', sc.source_path, sc.save_as)
+ logger.info("Linking %s and %s", sc.source_path, sc.save_as)
if self.fallback_to_symlinks:
os.symlink(source_path, save_as)
else:
@@ -957,9 +1066,8 @@ def _link_staticfile(self, sc):
except OSError as err:
if err.errno == errno.EXDEV: # 18: Invalid cross-device link
logger.debug(
- "Cross-device links not valid. "
- "Creating symbolic links instead."
- )
+                    "Cross-device links not valid. Creating symbolic links instead."
+ )
self.fallback_to_symlinks = True
self._link_staticfile(sc)
else:
@@ -972,19 +1080,17 @@ def _mkdir(self, path):
class SourceFileGenerator(Generator):
-
def generate_context(self):
- self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']
+ self.output_extension = self.settings["OUTPUT_SOURCES_EXTENSION"]
def _create_source(self, obj):
output_path, _ = os.path.splitext(obj.save_as)
- dest = os.path.join(self.output_path,
- output_path + self.output_extension)
+ dest = os.path.join(self.output_path, output_path + self.output_extension)
copy(obj.source_path, dest)
def generate_output(self, writer=None):
- logger.info('Generating source files...')
- for obj in chain(self.context['articles'], self.context['pages']):
+ logger.info("Generating source files...")
+ for obj in chain(self.context["articles"], self.context["pages"]):
self._create_source(obj)
for obj_trans in obj.translations:
self._create_source(obj_trans)
diff --git a/pelican/log.py b/pelican/log.py
--- a/pelican/log.py
+++ b/pelican/log.py
@@ -4,9 +4,7 @@
from rich.console import Console
from rich.logging import RichHandler
-__all__ = [
- 'init'
-]
+__all__ = ["init"]
console = Console()
@@ -34,8 +32,8 @@ def filter(self, record):
return True
# extract group
- group = record.__dict__.get('limit_msg', None)
- group_args = record.__dict__.get('limit_args', ())
+ group = record.__dict__.get("limit_msg", None)
+ group_args = record.__dict__.get("limit_args", ())
# ignore record if it was already raised
message_key = (record.levelno, record.getMessage())
@@ -50,7 +48,7 @@ def filter(self, record):
if logger_level > logging.DEBUG:
template_key = (record.levelno, record.msg)
message_key = (record.levelno, record.getMessage())
- if (template_key in self._ignore or message_key in self._ignore):
+ if template_key in self._ignore or message_key in self._ignore:
return False
# check if we went over threshold
@@ -90,12 +88,12 @@ class FatalLogger(LimitLogger):
def warning(self, *args, **kwargs):
super().warning(*args, **kwargs)
if FatalLogger.warnings_fatal:
- raise RuntimeError('Warning encountered')
+ raise RuntimeError("Warning encountered")
def error(self, *args, **kwargs):
super().error(*args, **kwargs)
if FatalLogger.errors_fatal:
- raise RuntimeError('Error encountered')
+ raise RuntimeError("Error encountered")
logging.setLoggerClass(FatalLogger)
@@ -103,17 +101,19 @@ def error(self, *args, **kwargs):
logging.getLogger().__class__ = FatalLogger
-def init(level=None, fatal='', handler=RichHandler(console=console), name=None,
- logs_dedup_min_level=None):
- FatalLogger.warnings_fatal = fatal.startswith('warning')
+def init(
+ level=None,
+ fatal="",
+ handler=RichHandler(console=console),
+ name=None,
+ logs_dedup_min_level=None,
+):
+ FatalLogger.warnings_fatal = fatal.startswith("warning")
FatalLogger.errors_fatal = bool(fatal)
LOG_FORMAT = "%(message)s"
logging.basicConfig(
- level=level,
- format=LOG_FORMAT,
- datefmt="[%H:%M:%S]",
- handlers=[handler]
+ level=level, format=LOG_FORMAT, datefmt="[%H:%M:%S]", handlers=[handler]
)
logger = logging.getLogger(name)
@@ -126,17 +126,18 @@ def init(level=None, fatal='', handler=RichHandler(console=console), name=None,
def log_warnings():
import warnings
+
logging.captureWarnings(True)
warnings.simplefilter("default", DeprecationWarning)
- init(logging.DEBUG, name='py.warnings')
+ init(logging.DEBUG, name="py.warnings")
-if __name__ == '__main__':
+if __name__ == "__main__":
init(level=logging.DEBUG, name=__name__)
root_logger = logging.getLogger(__name__)
- root_logger.debug('debug')
- root_logger.info('info')
- root_logger.warning('warning')
- root_logger.error('error')
- root_logger.critical('critical')
+ root_logger.debug("debug")
+ root_logger.info("info")
+ root_logger.warning("warning")
+ root_logger.error("error")
+ root_logger.critical("critical")
diff --git a/pelican/paginator.py b/pelican/paginator.py
--- a/pelican/paginator.py
+++ b/pelican/paginator.py
@@ -6,8 +6,8 @@
logger = logging.getLogger(__name__)
PaginationRule = namedtuple(
- 'PaginationRule',
- 'min_page URL SAVE_AS',
+ "PaginationRule",
+ "min_page URL SAVE_AS",
)
@@ -19,7 +19,7 @@ def __init__(self, name, url, object_list, settings, per_page=None):
self.settings = settings
if per_page:
self.per_page = per_page
- self.orphans = settings['DEFAULT_ORPHANS']
+ self.orphans = settings["DEFAULT_ORPHANS"]
else:
self.per_page = len(object_list)
self.orphans = 0
@@ -32,14 +32,21 @@ def page(self, number):
top = bottom + self.per_page
if top + self.orphans >= self.count:
top = self.count
- return Page(self.name, self.url, self.object_list[bottom:top], number,
- self, self.settings)
+ return Page(
+ self.name,
+ self.url,
+ self.object_list[bottom:top],
+ number,
+ self,
+ self.settings,
+ )
def _get_count(self):
"Returns the total number of objects, across all pages."
if self._count is None:
self._count = len(self.object_list)
return self._count
+
count = property(_get_count)
def _get_num_pages(self):
@@ -48,6 +55,7 @@ def _get_num_pages(self):
hits = max(1, self.count - self.orphans)
self._num_pages = int(ceil(hits / (float(self.per_page) or 1)))
return self._num_pages
+
num_pages = property(_get_num_pages)
def _get_page_range(self):
@@ -56,6 +64,7 @@ def _get_page_range(self):
a template for loop.
"""
return list(range(1, self.num_pages + 1))
+
page_range = property(_get_page_range)
@@ -64,7 +73,7 @@ def __init__(self, name, url, object_list, number, paginator, settings):
self.full_name = name
self.name, self.extension = os.path.splitext(name)
dn, fn = os.path.split(name)
- self.base_name = dn if fn in ('index.htm', 'index.html') else self.name
+ self.base_name = dn if fn in ("index.htm", "index.html") else self.name
self.base_url = url
self.object_list = object_list
self.number = number
@@ -72,7 +81,7 @@ def __init__(self, name, url, object_list, number, paginator, settings):
self.settings = settings
def __repr__(self):
- return '<Page {} of {}>'.format(self.number, self.paginator.num_pages)
+ return "<Page {} of {}>".format(self.number, self.paginator.num_pages)
def has_next(self):
return self.number < self.paginator.num_pages
@@ -117,7 +126,7 @@ def _from_settings(self, key):
rule = None
# find the last matching pagination rule
- for p in self.settings['PAGINATION_PATTERNS']:
+ for p in self.settings["PAGINATION_PATTERNS"]:
if p.min_page == -1:
if not self.has_next():
rule = p
@@ -127,22 +136,22 @@ def _from_settings(self, key):
rule = p
if not rule:
- return ''
+ return ""
prop_value = getattr(rule, key)
if not isinstance(prop_value, str):
- logger.warning('%s is set to %s', key, prop_value)
+ logger.warning("%s is set to %s", key, prop_value)
return prop_value
# URL or SAVE_AS is a string, format it with a controlled context
context = {
- 'save_as': self.full_name,
- 'url': self.base_url,
- 'name': self.name,
- 'base_name': self.base_name,
- 'extension': self.extension,
- 'number': self.number,
+ "save_as": self.full_name,
+ "url": self.base_url,
+ "name": self.name,
+ "base_name": self.base_name,
+ "extension": self.extension,
+ "number": self.number,
}
ret = prop_value.format(**context)
@@ -155,9 +164,9 @@ def _from_settings(self, key):
# changed to lstrip() because that would remove all leading slashes and
# thus make the workaround impossible. See
# test_custom_pagination_pattern() for a verification of this.
- if ret.startswith('/'):
+ if ret.startswith("/"):
ret = ret[1:]
return ret
- url = property(functools.partial(_from_settings, key='URL'))
- save_as = property(functools.partial(_from_settings, key='SAVE_AS'))
+ url = property(functools.partial(_from_settings, key="URL"))
+ save_as = property(functools.partial(_from_settings, key="SAVE_AS"))
diff --git a/pelican/plugins/_utils.py b/pelican/plugins/_utils.py
--- a/pelican/plugins/_utils.py
+++ b/pelican/plugins/_utils.py
@@ -24,26 +24,26 @@ def get_namespace_plugins(ns_pkg=None):
return {
name: importlib.import_module(name)
- for finder, name, ispkg
- in iter_namespace(ns_pkg)
+ for finder, name, ispkg in iter_namespace(ns_pkg)
if ispkg
}
def list_plugins(ns_pkg=None):
from pelican.log import init as init_logging
+
init_logging(logging.INFO)
ns_plugins = get_namespace_plugins(ns_pkg)
if ns_plugins:
- logger.info('Plugins found:\n' + '\n'.join(ns_plugins))
+ logger.info("Plugins found:\n" + "\n".join(ns_plugins))
else:
- logger.info('No plugins are installed')
+ logger.info("No plugins are installed")
def load_legacy_plugin(plugin, plugin_paths):
- if '.' in plugin:
+ if "." in plugin:
# it is in a package, try to resolve package first
- package, _, _ = plugin.rpartition('.')
+ package, _, _ = plugin.rpartition(".")
load_legacy_plugin(package, plugin_paths)
# Try to find plugin in PLUGIN_PATHS
@@ -52,7 +52,7 @@ def load_legacy_plugin(plugin, plugin_paths):
# If failed, try to find it in normal importable locations
spec = importlib.util.find_spec(plugin)
if spec is None:
- raise ImportError('Cannot import plugin `{}`'.format(plugin))
+ raise ImportError("Cannot import plugin `{}`".format(plugin))
else:
# Avoid loading the same plugin twice
if spec.name in sys.modules:
@@ -78,30 +78,28 @@ def load_legacy_plugin(plugin, plugin_paths):
def load_plugins(settings):
- logger.debug('Finding namespace plugins')
+ logger.debug("Finding namespace plugins")
namespace_plugins = get_namespace_plugins()
if namespace_plugins:
- logger.debug('Namespace plugins found:\n' +
- '\n'.join(namespace_plugins))
+ logger.debug("Namespace plugins found:\n" + "\n".join(namespace_plugins))
plugins = []
- if settings.get('PLUGINS') is not None:
- for plugin in settings['PLUGINS']:
+ if settings.get("PLUGINS") is not None:
+ for plugin in settings["PLUGINS"]:
if isinstance(plugin, str):
- logger.debug('Loading plugin `%s`', plugin)
+ logger.debug("Loading plugin `%s`", plugin)
# try to find in namespace plugins
if plugin in namespace_plugins:
plugin = namespace_plugins[plugin]
- elif 'pelican.plugins.{}'.format(plugin) in namespace_plugins:
- plugin = namespace_plugins['pelican.plugins.{}'.format(
- plugin)]
+ elif "pelican.plugins.{}".format(plugin) in namespace_plugins:
+ plugin = namespace_plugins["pelican.plugins.{}".format(plugin)]
# try to import it
else:
try:
plugin = load_legacy_plugin(
- plugin,
- settings.get('PLUGIN_PATHS', []))
+ plugin, settings.get("PLUGIN_PATHS", [])
+ )
except ImportError as e:
- logger.error('Cannot load plugin `%s`\n%s', plugin, e)
+ logger.error("Cannot load plugin `%s`\n%s", plugin, e)
continue
plugins.append(plugin)
else:
diff --git a/pelican/plugins/signals.py b/pelican/plugins/signals.py
--- a/pelican/plugins/signals.py
+++ b/pelican/plugins/signals.py
@@ -2,48 +2,48 @@
# Run-level signals:
-initialized = signal('pelican_initialized')
-get_generators = signal('get_generators')
-all_generators_finalized = signal('all_generators_finalized')
-get_writer = signal('get_writer')
-finalized = signal('pelican_finalized')
+initialized = signal("pelican_initialized")
+get_generators = signal("get_generators")
+all_generators_finalized = signal("all_generators_finalized")
+get_writer = signal("get_writer")
+finalized = signal("pelican_finalized")
# Reader-level signals
-readers_init = signal('readers_init')
+readers_init = signal("readers_init")
# Generator-level signals
-generator_init = signal('generator_init')
+generator_init = signal("generator_init")
-article_generator_init = signal('article_generator_init')
-article_generator_pretaxonomy = signal('article_generator_pretaxonomy')
-article_generator_finalized = signal('article_generator_finalized')
-article_generator_write_article = signal('article_generator_write_article')
-article_writer_finalized = signal('article_writer_finalized')
+article_generator_init = signal("article_generator_init")
+article_generator_pretaxonomy = signal("article_generator_pretaxonomy")
+article_generator_finalized = signal("article_generator_finalized")
+article_generator_write_article = signal("article_generator_write_article")
+article_writer_finalized = signal("article_writer_finalized")
-page_generator_init = signal('page_generator_init')
-page_generator_finalized = signal('page_generator_finalized')
-page_generator_write_page = signal('page_generator_write_page')
-page_writer_finalized = signal('page_writer_finalized')
+page_generator_init = signal("page_generator_init")
+page_generator_finalized = signal("page_generator_finalized")
+page_generator_write_page = signal("page_generator_write_page")
+page_writer_finalized = signal("page_writer_finalized")
-static_generator_init = signal('static_generator_init')
-static_generator_finalized = signal('static_generator_finalized')
+static_generator_init = signal("static_generator_init")
+static_generator_finalized = signal("static_generator_finalized")
# Page-level signals
-article_generator_preread = signal('article_generator_preread')
-article_generator_context = signal('article_generator_context')
+article_generator_preread = signal("article_generator_preread")
+article_generator_context = signal("article_generator_context")
-page_generator_preread = signal('page_generator_preread')
-page_generator_context = signal('page_generator_context')
+page_generator_preread = signal("page_generator_preread")
+page_generator_context = signal("page_generator_context")
-static_generator_preread = signal('static_generator_preread')
-static_generator_context = signal('static_generator_context')
+static_generator_preread = signal("static_generator_preread")
+static_generator_context = signal("static_generator_context")
-content_object_init = signal('content_object_init')
+content_object_init = signal("content_object_init")
# Writers signals
-content_written = signal('content_written')
-feed_generated = signal('feed_generated')
-feed_written = signal('feed_written')
+content_written = signal("content_written")
+feed_generated = signal("feed_generated")
+feed_written = signal("feed_written")
diff --git a/pelican/readers.py b/pelican/readers.py
--- a/pelican/readers.py
+++ b/pelican/readers.py
@@ -31,33 +31,29 @@
_DISCARD = object()
DUPLICATES_DEFINITIONS_ALLOWED = {
- 'tags': False,
- 'date': False,
- 'modified': False,
- 'status': False,
- 'category': False,
- 'author': False,
- 'save_as': False,
- 'url': False,
- 'authors': False,
- 'slug': False
+ "tags": False,
+ "date": False,
+ "modified": False,
+ "status": False,
+ "category": False,
+ "author": False,
+ "save_as": False,
+ "url": False,
+ "authors": False,
+ "slug": False,
}
METADATA_PROCESSORS = {
- 'tags': lambda x, y: ([
- Tag(tag, y)
- for tag in ensure_metadata_list(x)
- ] or _DISCARD),
- 'date': lambda x, y: get_date(x.replace('_', ' ')),
- 'modified': lambda x, y: get_date(x),
- 'status': lambda x, y: x.strip() or _DISCARD,
- 'category': lambda x, y: _process_if_nonempty(Category, x, y),
- 'author': lambda x, y: _process_if_nonempty(Author, x, y),
- 'authors': lambda x, y: ([
- Author(author, y)
- for author in ensure_metadata_list(x)
- ] or _DISCARD),
- 'slug': lambda x, y: x.strip() or _DISCARD,
+ "tags": lambda x, y: ([Tag(tag, y) for tag in ensure_metadata_list(x)] or _DISCARD),
+ "date": lambda x, y: get_date(x.replace("_", " ")),
+ "modified": lambda x, y: get_date(x),
+ "status": lambda x, y: x.strip() or _DISCARD,
+ "category": lambda x, y: _process_if_nonempty(Category, x, y),
+ "author": lambda x, y: _process_if_nonempty(Author, x, y),
+ "authors": lambda x, y: (
+ [Author(author, y) for author in ensure_metadata_list(x)] or _DISCARD
+ ),
+ "slug": lambda x, y: x.strip() or _DISCARD,
}
logger = logging.getLogger(__name__)
@@ -65,25 +61,23 @@
def ensure_metadata_list(text):
"""Canonicalize the format of a list of authors or tags. This works
- the same way as Docutils' "authors" field: if it's already a list,
- those boundaries are preserved; otherwise, it must be a string;
- if the string contains semicolons, it is split on semicolons;
- otherwise, it is split on commas. This allows you to write
- author lists in either "Jane Doe, John Doe" or "Doe, Jane; Doe, John"
- format.
-
- Regardless, all list items undergo .strip() before returning, and
- empty items are discarded.
+ the same way as Docutils' "authors" field: if it's already a list,
+ those boundaries are preserved; otherwise, it must be a string;
+ if the string contains semicolons, it is split on semicolons;
+ otherwise, it is split on commas. This allows you to write
+ author lists in either "Jane Doe, John Doe" or "Doe, Jane; Doe, John"
+ format.
+
+ Regardless, all list items undergo .strip() before returning, and
+ empty items are discarded.
"""
if isinstance(text, str):
- if ';' in text:
- text = text.split(';')
+ if ";" in text:
+ text = text.split(";")
else:
- text = text.split(',')
+ text = text.split(",")
- return list(OrderedDict.fromkeys(
- [v for v in (w.strip() for w in text) if v]
- ))
+ return list(OrderedDict.fromkeys([v for v in (w.strip() for w in text) if v]))
def _process_if_nonempty(processor, name, settings):
@@ -112,8 +106,9 @@ class BaseReader:
Markdown).
"""
+
enabled = True
- file_extensions = ['static']
+ file_extensions = ["static"]
extensions = None
def __init__(self, settings):
@@ -132,13 +127,12 @@ def read(self, source_path):
class _FieldBodyTranslator(HTMLTranslator):
-
def __init__(self, document):
super().__init__(document)
self.compact_p = None
def astext(self):
- return ''.join(self.body)
+ return "".join(self.body)
def visit_field_body(self, node):
pass
@@ -154,27 +148,25 @@ def render_node_to_html(document, node, field_body_translator_class):
class PelicanHTMLWriter(Writer):
-
def __init__(self):
super().__init__()
self.translator_class = PelicanHTMLTranslator
class PelicanHTMLTranslator(HTMLTranslator):
-
def visit_abbreviation(self, node):
attrs = {}
- if node.hasattr('explanation'):
- attrs['title'] = node['explanation']
- self.body.append(self.starttag(node, 'abbr', '', **attrs))
+ if node.hasattr("explanation"):
+ attrs["title"] = node["explanation"]
+ self.body.append(self.starttag(node, "abbr", "", **attrs))
def depart_abbreviation(self, node):
- self.body.append('</abbr>')
+ self.body.append("</abbr>")
def visit_image(self, node):
# set an empty alt if alt is not specified
# avoids that alt is taken from src
- node['alt'] = node.get('alt', '')
+ node["alt"] = node.get("alt", "")
return HTMLTranslator.visit_image(self, node)
@@ -194,7 +186,7 @@ class RstReader(BaseReader):
"""
enabled = bool(docutils)
- file_extensions = ['rst']
+ file_extensions = ["rst"]
writer_class = PelicanHTMLWriter
field_body_translator_class = _FieldBodyTranslator
@@ -202,25 +194,28 @@ class RstReader(BaseReader):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- lang_code = self.settings.get('DEFAULT_LANG', 'en')
+ lang_code = self.settings.get("DEFAULT_LANG", "en")
if get_docutils_lang(lang_code):
self._language_code = lang_code
else:
- logger.warning("Docutils has no localization for '%s'."
- " Using 'en' instead.", lang_code)
- self._language_code = 'en'
+ logger.warning(
+                "Docutils has no localization for '%s'. Using 'en' instead.",
+ lang_code,
+ )
+ self._language_code = "en"
def _parse_metadata(self, document, source_path):
"""Return the dict containing document metadata"""
- formatted_fields = self.settings['FORMATTED_FIELDS']
+ formatted_fields = self.settings["FORMATTED_FIELDS"]
output = {}
if document.first_child_matching_class(docutils.nodes.title) is None:
logger.warning(
- 'Document title missing in file %s: '
- 'Ensure exactly one top level section',
- source_path)
+ "Document title missing in file %s: "
+ "Ensure exactly one top level section",
+ source_path,
+ )
try:
# docutils 0.18.1+
@@ -231,16 +226,16 @@ def _parse_metadata(self, document, source_path):
for docinfo in nodes:
for element in docinfo.children:
- if element.tagname == 'field': # custom fields (e.g. summary)
+ if element.tagname == "field": # custom fields (e.g. summary)
name_elem, body_elem = element.children
name = name_elem.astext()
if name.lower() in formatted_fields:
value = render_node_to_html(
- document, body_elem,
- self.field_body_translator_class)
+ document, body_elem, self.field_body_translator_class
+ )
else:
value = body_elem.astext()
- elif element.tagname == 'authors': # author list
+ elif element.tagname == "authors": # author list
name = element.tagname
value = [element.astext() for element in element.children]
else: # standard fields (e.g. address)
@@ -252,22 +247,24 @@ def _parse_metadata(self, document, source_path):
return output
def _get_publisher(self, source_path):
- extra_params = {'initial_header_level': '2',
- 'syntax_highlight': 'short',
- 'input_encoding': 'utf-8',
- 'language_code': self._language_code,
- 'halt_level': 2,
- 'traceback': True,
- 'warning_stream': StringIO(),
- 'embed_stylesheet': False}
- user_params = self.settings.get('DOCUTILS_SETTINGS')
+ extra_params = {
+ "initial_header_level": "2",
+ "syntax_highlight": "short",
+ "input_encoding": "utf-8",
+ "language_code": self._language_code,
+ "halt_level": 2,
+ "traceback": True,
+ "warning_stream": StringIO(),
+ "embed_stylesheet": False,
+ }
+ user_params = self.settings.get("DOCUTILS_SETTINGS")
if user_params:
extra_params.update(user_params)
pub = docutils.core.Publisher(
- writer=self.writer_class(),
- destination_class=docutils.io.StringOutput)
- pub.set_components('standalone', 'restructuredtext', 'html')
+ writer=self.writer_class(), destination_class=docutils.io.StringOutput
+ )
+ pub.set_components("standalone", "restructuredtext", "html")
pub.process_programmatic_settings(None, extra_params, None)
pub.set_source(source_path=source_path)
pub.publish()
@@ -277,10 +274,10 @@ def read(self, source_path):
"""Parses restructured text"""
pub = self._get_publisher(source_path)
parts = pub.writer.parts
- content = parts.get('body')
+ content = parts.get("body")
metadata = self._parse_metadata(pub.document, source_path)
- metadata.setdefault('title', parts.get('title'))
+ metadata.setdefault("title", parts.get("title"))
return content, metadata
@@ -289,26 +286,26 @@ class MarkdownReader(BaseReader):
"""Reader for Markdown files"""
enabled = bool(Markdown)
- file_extensions = ['md', 'markdown', 'mkd', 'mdown']
+ file_extensions = ["md", "markdown", "mkd", "mdown"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- settings = self.settings['MARKDOWN']
- settings.setdefault('extension_configs', {})
- settings.setdefault('extensions', [])
- for extension in settings['extension_configs'].keys():
- if extension not in settings['extensions']:
- settings['extensions'].append(extension)
- if 'markdown.extensions.meta' not in settings['extensions']:
- settings['extensions'].append('markdown.extensions.meta')
+ settings = self.settings["MARKDOWN"]
+ settings.setdefault("extension_configs", {})
+ settings.setdefault("extensions", [])
+ for extension in settings["extension_configs"].keys():
+ if extension not in settings["extensions"]:
+ settings["extensions"].append(extension)
+ if "markdown.extensions.meta" not in settings["extensions"]:
+ settings["extensions"].append("markdown.extensions.meta")
self._source_path = None
def _parse_metadata(self, meta):
"""Return the dict containing document metadata"""
- formatted_fields = self.settings['FORMATTED_FIELDS']
+ formatted_fields = self.settings["FORMATTED_FIELDS"]
# prevent metadata extraction in fields
- self._md.preprocessors.deregister('meta')
+ self._md.preprocessors.deregister("meta")
output = {}
for name, value in meta.items():
@@ -323,9 +320,10 @@ def _parse_metadata(self, meta):
elif not DUPLICATES_DEFINITIONS_ALLOWED.get(name, True):
if len(value) > 1:
logger.warning(
- 'Duplicate definition of `%s` '
- 'for %s. Using first one.',
- name, self._source_path)
+ "Duplicate definition of `%s` " "for %s. Using first one.",
+ name,
+ self._source_path,
+ )
output[name] = self.process_metadata(name, value[0])
elif len(value) > 1:
# handle list metadata as list of string
@@ -339,11 +337,11 @@ def read(self, source_path):
"""Parse content and metadata of markdown files"""
self._source_path = source_path
- self._md = Markdown(**self.settings['MARKDOWN'])
+ self._md = Markdown(**self.settings["MARKDOWN"])
with pelican_open(source_path) as text:
content = self._md.convert(text)
- if hasattr(self._md, 'Meta'):
+ if hasattr(self._md, "Meta"):
metadata = self._parse_metadata(self._md.Meta)
else:
metadata = {}
@@ -353,17 +351,17 @@ def read(self, source_path):
class HTMLReader(BaseReader):
"""Parses HTML files as input, looking for meta, title, and body tags"""
- file_extensions = ['htm', 'html']
+ file_extensions = ["htm", "html"]
enabled = True
class _HTMLParser(HTMLParser):
def __init__(self, settings, filename):
super().__init__(convert_charrefs=False)
- self.body = ''
+ self.body = ""
self.metadata = {}
self.settings = settings
- self._data_buffer = ''
+ self._data_buffer = ""
self._filename = filename
@@ -374,59 +372,59 @@ def __init__(self, settings, filename):
self._in_tags = False
def handle_starttag(self, tag, attrs):
- if tag == 'head' and self._in_top_level:
+ if tag == "head" and self._in_top_level:
self._in_top_level = False
self._in_head = True
- elif tag == 'title' and self._in_head:
+ elif tag == "title" and self._in_head:
self._in_title = True
- self._data_buffer = ''
- elif tag == 'body' and self._in_top_level:
+ self._data_buffer = ""
+ elif tag == "body" and self._in_top_level:
self._in_top_level = False
self._in_body = True
- self._data_buffer = ''
- elif tag == 'meta' and self._in_head:
+ self._data_buffer = ""
+ elif tag == "meta" and self._in_head:
self._handle_meta_tag(attrs)
elif self._in_body:
self._data_buffer += self.build_tag(tag, attrs, False)
def handle_endtag(self, tag):
- if tag == 'head':
+ if tag == "head":
if self._in_head:
self._in_head = False
self._in_top_level = True
- elif self._in_head and tag == 'title':
+ elif self._in_head and tag == "title":
self._in_title = False
- self.metadata['title'] = self._data_buffer
- elif tag == 'body':
+ self.metadata["title"] = self._data_buffer
+ elif tag == "body":
self.body = self._data_buffer
self._in_body = False
self._in_top_level = True
elif self._in_body:
- self._data_buffer += '</{}>'.format(escape(tag))
+ self._data_buffer += "</{}>".format(escape(tag))
def handle_startendtag(self, tag, attrs):
- if tag == 'meta' and self._in_head:
+ if tag == "meta" and self._in_head:
self._handle_meta_tag(attrs)
if self._in_body:
self._data_buffer += self.build_tag(tag, attrs, True)
def handle_comment(self, data):
- self._data_buffer += '<!--{}-->'.format(data)
+ self._data_buffer += "<!--{}-->".format(data)
def handle_data(self, data):
self._data_buffer += data
def handle_entityref(self, data):
- self._data_buffer += '&{};'.format(data)
+ self._data_buffer += "&{};".format(data)
def handle_charref(self, data):
- self._data_buffer += '&#{};'.format(data)
+ self._data_buffer += "&#{};".format(data)
def build_tag(self, tag, attrs, close_tag):
- result = '<{}'.format(escape(tag))
+ result = "<{}".format(escape(tag))
for k, v in attrs:
- result += ' ' + escape(k)
+ result += " " + escape(k)
if v is not None:
# If the attribute value contains a double quote, surround
# with single quotes, otherwise use double quotes.
@@ -435,33 +433,39 @@ def build_tag(self, tag, attrs, close_tag):
else:
result += '="{}"'.format(escape(v, quote=False))
if close_tag:
- return result + ' />'
- return result + '>'
+ return result + " />"
+ return result + ">"
def _handle_meta_tag(self, attrs):
- name = self._attr_value(attrs, 'name')
+ name = self._attr_value(attrs, "name")
if name is None:
attr_list = ['{}="{}"'.format(k, v) for k, v in attrs]
- attr_serialized = ', '.join(attr_list)
- logger.warning("Meta tag in file %s does not have a 'name' "
- "attribute, skipping. Attributes: %s",
- self._filename, attr_serialized)
+ attr_serialized = ", ".join(attr_list)
+ logger.warning(
+ "Meta tag in file %s does not have a 'name' "
+ "attribute, skipping. Attributes: %s",
+ self._filename,
+ attr_serialized,
+ )
return
name = name.lower()
- contents = self._attr_value(attrs, 'content', '')
+ contents = self._attr_value(attrs, "content", "")
if not contents:
- contents = self._attr_value(attrs, 'contents', '')
+ contents = self._attr_value(attrs, "contents", "")
if contents:
logger.warning(
"Meta tag attribute 'contents' used in file %s, should"
" be changed to 'content'",
self._filename,
- extra={'limit_msg': "Other files have meta tag "
- "attribute 'contents' that should "
- "be changed to 'content'"})
+ extra={
+ "limit_msg": "Other files have meta tag "
+ "attribute 'contents' that should "
+ "be changed to 'content'"
+ },
+ )
- if name == 'keywords':
- name = 'tags'
+ if name == "keywords":
+ name = "tags"
if name in self.metadata:
# if this metadata already exists (i.e. a previous tag with the
@@ -501,22 +505,23 @@ class Readers(FileStampDataCacher):
"""
- def __init__(self, settings=None, cache_name=''):
+ def __init__(self, settings=None, cache_name=""):
self.settings = settings or {}
self.readers = {}
self.reader_classes = {}
for cls in [BaseReader] + BaseReader.__subclasses__():
if not cls.enabled:
- logger.debug('Missing dependencies for %s',
- ', '.join(cls.file_extensions))
+ logger.debug(
+ "Missing dependencies for %s", ", ".join(cls.file_extensions)
+ )
continue
for ext in cls.file_extensions:
self.reader_classes[ext] = cls
- if self.settings['READERS']:
- self.reader_classes.update(self.settings['READERS'])
+ if self.settings["READERS"]:
+ self.reader_classes.update(self.settings["READERS"])
signals.readers_init.send(self)
@@ -527,53 +532,67 @@ def __init__(self, settings=None, cache_name=''):
self.readers[fmt] = reader_class(self.settings)
# set up caching
- cache_this_level = (cache_name != '' and
- self.settings['CONTENT_CACHING_LAYER'] == 'reader')
- caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
- load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
+ cache_this_level = (
+ cache_name != "" and self.settings["CONTENT_CACHING_LAYER"] == "reader"
+ )
+ caching_policy = cache_this_level and self.settings["CACHE_CONTENT"]
+ load_policy = cache_this_level and self.settings["LOAD_CONTENT_CACHE"]
super().__init__(settings, cache_name, caching_policy, load_policy)
@property
def extensions(self):
return self.readers.keys()
- def read_file(self, base_path, path, content_class=Page, fmt=None,
- context=None, preread_signal=None, preread_sender=None,
- context_signal=None, context_sender=None):
+ def read_file(
+ self,
+ base_path,
+ path,
+ content_class=Page,
+ fmt=None,
+ context=None,
+ preread_signal=None,
+ preread_sender=None,
+ context_signal=None,
+ context_sender=None,
+ ):
"""Return a content object parsed with the given format."""
path = os.path.abspath(os.path.join(base_path, path))
source_path = posixize_path(os.path.relpath(path, base_path))
- logger.debug(
- 'Read file %s -> %s',
- source_path, content_class.__name__)
+ logger.debug("Read file %s -> %s", source_path, content_class.__name__)
if not fmt:
_, ext = os.path.splitext(os.path.basename(path))
fmt = ext[1:]
if fmt not in self.readers:
- raise TypeError(
- 'Pelican does not know how to parse %s', path)
+ raise TypeError("Pelican does not know how to parse %s", path)
if preread_signal:
- logger.debug(
- 'Signal %s.send(%s)',
- preread_signal.name, preread_sender)
+ logger.debug("Signal %s.send(%s)", preread_signal.name, preread_sender)
preread_signal.send(preread_sender)
reader = self.readers[fmt]
- metadata = _filter_discardable_metadata(default_metadata(
- settings=self.settings, process=reader.process_metadata))
- metadata.update(path_metadata(
- full_path=path, source_path=source_path,
- settings=self.settings))
- metadata.update(_filter_discardable_metadata(parse_path_metadata(
- source_path=source_path, settings=self.settings,
- process=reader.process_metadata)))
+ metadata = _filter_discardable_metadata(
+ default_metadata(settings=self.settings, process=reader.process_metadata)
+ )
+ metadata.update(
+ path_metadata(
+ full_path=path, source_path=source_path, settings=self.settings
+ )
+ )
+ metadata.update(
+ _filter_discardable_metadata(
+ parse_path_metadata(
+ source_path=source_path,
+ settings=self.settings,
+ process=reader.process_metadata,
+ )
+ )
+ )
reader_name = reader.__class__.__name__
- metadata['reader'] = reader_name.replace('Reader', '').lower()
+ metadata["reader"] = reader_name.replace("Reader", "").lower()
content, reader_metadata = self.get_cached_data(path, (None, None))
if content is None:
@@ -587,14 +606,14 @@ def read_file(self, base_path, path, content_class=Page, fmt=None,
find_empty_alt(content, path)
# eventually filter the content with typogrify if asked so
- if self.settings['TYPOGRIFY']:
+ if self.settings["TYPOGRIFY"]:
from typogrify.filters import typogrify
import smartypants
- typogrify_dashes = self.settings['TYPOGRIFY_DASHES']
- if typogrify_dashes == 'oldschool':
+ typogrify_dashes = self.settings["TYPOGRIFY_DASHES"]
+ if typogrify_dashes == "oldschool":
smartypants.Attr.default = smartypants.Attr.set2
- elif typogrify_dashes == 'oldschool_inverted':
+ elif typogrify_dashes == "oldschool_inverted":
smartypants.Attr.default = smartypants.Attr.set3
else:
smartypants.Attr.default = smartypants.Attr.set1
@@ -608,31 +627,32 @@ def read_file(self, base_path, path, content_class=Page, fmt=None,
def typogrify_wrapper(text):
"""Ensures ignore_tags feature is backward compatible"""
try:
- return typogrify(
- text,
- self.settings['TYPOGRIFY_IGNORE_TAGS'])
+ return typogrify(text, self.settings["TYPOGRIFY_IGNORE_TAGS"])
except TypeError:
return typogrify(text)
if content:
content = typogrify_wrapper(content)
- if 'title' in metadata:
- metadata['title'] = typogrify_wrapper(metadata['title'])
+ if "title" in metadata:
+ metadata["title"] = typogrify_wrapper(metadata["title"])
- if 'summary' in metadata:
- metadata['summary'] = typogrify_wrapper(metadata['summary'])
+ if "summary" in metadata:
+ metadata["summary"] = typogrify_wrapper(metadata["summary"])
if context_signal:
logger.debug(
- 'Signal %s.send(%s, <metadata>)',
- context_signal.name,
- context_sender)
+ "Signal %s.send(%s, <metadata>)", context_signal.name, context_sender
+ )
context_signal.send(context_sender, metadata=metadata)
- return content_class(content=content, metadata=metadata,
- settings=self.settings, source_path=path,
- context=context)
+ return content_class(
+ content=content,
+ metadata=metadata,
+ settings=self.settings,
+ source_path=path,
+ context=context,
+ )
def find_empty_alt(content, path):
@@ -642,7 +662,8 @@ def find_empty_alt(content, path):
as they are really likely to be accessibility flaws.
"""
- imgs = re.compile(r"""
+ imgs = re.compile(
+ r"""
(?:
# src before alt
<img
@@ -658,53 +679,57 @@ def find_empty_alt(content, path):
[^\>]*
src=(['"])(.*?)\5
)
- """, re.X)
+ """,
+ re.X,
+ )
for match in re.findall(imgs, content):
logger.warning(
- 'Empty alt attribute for image %s in %s',
- os.path.basename(match[1] + match[5]), path,
- extra={'limit_msg': 'Other images have empty alt attributes'})
+ "Empty alt attribute for image %s in %s",
+ os.path.basename(match[1] + match[5]),
+ path,
+ extra={"limit_msg": "Other images have empty alt attributes"},
+ )
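# Illustrative sketch, not from the patch: a deliberately cruder pattern than
# the one above, showing the kind of match the warning is based on. It only
# handles the alt-before-src ordering; the real pattern covers both orders.
import re

sample = '<p><img alt="" src="images/pelican.png" /></p>'
crude = re.compile(r"""<img[^>]*\balt=(['"])\1[^>]*\bsrc=(['"])(.*?)\2""")
for m in crude.finditer(sample):
    print("empty alt for image:", m.group(3))  # -> images/pelican.png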
def default_metadata(settings=None, process=None):
metadata = {}
if settings:
- for name, value in dict(settings.get('DEFAULT_METADATA', {})).items():
+ for name, value in dict(settings.get("DEFAULT_METADATA", {})).items():
if process:
value = process(name, value)
metadata[name] = value
- if 'DEFAULT_CATEGORY' in settings:
- value = settings['DEFAULT_CATEGORY']
+ if "DEFAULT_CATEGORY" in settings:
+ value = settings["DEFAULT_CATEGORY"]
if process:
- value = process('category', value)
- metadata['category'] = value
- if settings.get('DEFAULT_DATE', None) and \
- settings['DEFAULT_DATE'] != 'fs':
- if isinstance(settings['DEFAULT_DATE'], str):
- metadata['date'] = get_date(settings['DEFAULT_DATE'])
+ value = process("category", value)
+ metadata["category"] = value
+ if settings.get("DEFAULT_DATE", None) and settings["DEFAULT_DATE"] != "fs":
+ if isinstance(settings["DEFAULT_DATE"], str):
+ metadata["date"] = get_date(settings["DEFAULT_DATE"])
else:
- metadata['date'] = datetime.datetime(*settings['DEFAULT_DATE'])
+ metadata["date"] = datetime.datetime(*settings["DEFAULT_DATE"])
return metadata
def path_metadata(full_path, source_path, settings=None):
metadata = {}
if settings:
- if settings.get('DEFAULT_DATE', None) == 'fs':
- metadata['date'] = datetime.datetime.fromtimestamp(
- os.stat(full_path).st_mtime)
- metadata['modified'] = metadata['date']
+ if settings.get("DEFAULT_DATE", None) == "fs":
+ metadata["date"] = datetime.datetime.fromtimestamp(
+ os.stat(full_path).st_mtime
+ )
+ metadata["modified"] = metadata["date"]
# Apply EXTRA_PATH_METADATA for the source path and the paths of any
# parent directories. Sorting EPM first ensures that the most specific
# path wins conflicts.
- epm = settings.get('EXTRA_PATH_METADATA', {})
+ epm = settings.get("EXTRA_PATH_METADATA", {})
for path, meta in sorted(epm.items()):
# Enforce a trailing slash when checking for parent directories.
# This prevents false positives when one file or directory's name
# is a prefix of another's.
- dirpath = posixize_path(os.path.join(path, ''))
+ dirpath = posixize_path(os.path.join(path, ""))
if source_path == path or source_path.startswith(dirpath):
metadata.update(meta)
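# Illustrative sketch, not from the patch: why the trailing slash matters when
# matching parent directories. The mapping below is a made-up example, not a
# default setting.
import posixpath

epm = {"static": {"status": "hidden"}}
for source_path in ("static/logo.png", "static-notes/readme.md"):
    for path, meta in sorted(epm.items()):
        dirpath = posixpath.join(path, "")  # "static" -> "static/"
        matches = source_path == path or source_path.startswith(dirpath)
        print(source_path, matches)  # static/... True, static-notes/... False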
@@ -736,11 +761,10 @@ def parse_path_metadata(source_path, settings=None, process=None):
subdir = os.path.basename(dirname)
if settings:
checks = []
- for key, data in [('FILENAME_METADATA', base),
- ('PATH_METADATA', source_path)]:
+ for key, data in [("FILENAME_METADATA", base), ("PATH_METADATA", source_path)]:
checks.append((settings.get(key, None), data))
- if settings.get('USE_FOLDER_AS_CATEGORY', None):
- checks.append(('(?P<category>.*)', subdir))
+ if settings.get("USE_FOLDER_AS_CATEGORY", None):
+ checks.append(("(?P<category>.*)", subdir))
for regexp, data in checks:
if regexp and data:
match = re.match(regexp, data)
diff --git a/pelican/rstdirectives.py b/pelican/rstdirectives.py
--- a/pelican/rstdirectives.py
+++ b/pelican/rstdirectives.py
@@ -11,26 +11,26 @@
class Pygments(Directive):
- """ Source code syntax highlighting.
- """
+ """Source code syntax highlighting."""
+
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
- 'anchorlinenos': directives.flag,
- 'classprefix': directives.unchanged,
- 'hl_lines': directives.unchanged,
- 'lineanchors': directives.unchanged,
- 'linenos': directives.unchanged,
- 'linenospecial': directives.nonnegative_int,
- 'linenostart': directives.nonnegative_int,
- 'linenostep': directives.nonnegative_int,
- 'lineseparator': directives.unchanged,
- 'linespans': directives.unchanged,
- 'nobackground': directives.flag,
- 'nowrap': directives.flag,
- 'tagsfile': directives.unchanged,
- 'tagurlformat': directives.unchanged,
+ "anchorlinenos": directives.flag,
+ "classprefix": directives.unchanged,
+ "hl_lines": directives.unchanged,
+ "lineanchors": directives.unchanged,
+ "linenos": directives.unchanged,
+ "linenospecial": directives.nonnegative_int,
+ "linenostart": directives.nonnegative_int,
+ "linenostep": directives.nonnegative_int,
+ "lineseparator": directives.unchanged,
+ "linespans": directives.unchanged,
+ "nobackground": directives.flag,
+ "nowrap": directives.flag,
+ "tagsfile": directives.unchanged,
+ "tagurlformat": directives.unchanged,
}
has_content = True
@@ -49,28 +49,30 @@ def run(self):
if k not in self.options:
self.options[k] = v
- if ('linenos' in self.options and
- self.options['linenos'] not in ('table', 'inline')):
- if self.options['linenos'] == 'none':
- self.options.pop('linenos')
+ if "linenos" in self.options and self.options["linenos"] not in (
+ "table",
+ "inline",
+ ):
+ if self.options["linenos"] == "none":
+ self.options.pop("linenos")
else:
- self.options['linenos'] = 'table'
+ self.options["linenos"] = "table"
- for flag in ('nowrap', 'nobackground', 'anchorlinenos'):
+ for flag in ("nowrap", "nobackground", "anchorlinenos"):
if flag in self.options:
self.options[flag] = True
# noclasses should already default to False, but just in case...
formatter = HtmlFormatter(noclasses=False, **self.options)
- parsed = highlight('\n'.join(self.content), lexer, formatter)
- return [nodes.raw('', parsed, format='html')]
+ parsed = highlight("\n".join(self.content), lexer, formatter)
+ return [nodes.raw("", parsed, format="html")]
-directives.register_directive('code-block', Pygments)
-directives.register_directive('sourcecode', Pygments)
+directives.register_directive("code-block", Pygments)
+directives.register_directive("sourcecode", Pygments)
-_abbr_re = re.compile(r'\((.*)\)$', re.DOTALL)
+_abbr_re = re.compile(r"\((.*)\)$", re.DOTALL)
class abbreviation(nodes.Inline, nodes.TextElement):
@@ -82,9 +84,9 @@ def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
m = _abbr_re.search(text)
if m is None:
return [abbreviation(text, text)], []
- abbr = text[:m.start()].strip()
+ abbr = text[: m.start()].strip()
expl = m.group(1)
return [abbreviation(abbr, abbr, explanation=expl)], []
-roles.register_local_role('abbr', abbr_role)
+roles.register_local_role("abbr", abbr_role)
diff --git a/pelican/server.py b/pelican/server.py
--- a/pelican/server.py
+++ b/pelican/server.py
@@ -14,38 +14,47 @@
from pelican.log import console # noqa: F401
from pelican.log import init as init_logging
+
logger = logging.getLogger(__name__)
def parse_arguments():
parser = argparse.ArgumentParser(
- description='Pelican Development Server',
- formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description="Pelican Development Server",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument(
+ "port", default=8000, type=int, nargs="?", help="Port to Listen On"
+ )
+ parser.add_argument("server", default="", nargs="?", help="Interface to Listen On")
+ parser.add_argument("--ssl", action="store_true", help="Activate SSL listener")
+ parser.add_argument(
+ "--cert",
+ default="./cert.pem",
+ nargs="?",
+ help="Path to certificate file. " + "Relative to current directory",
+ )
+ parser.add_argument(
+ "--key",
+ default="./key.pem",
+ nargs="?",
+ help="Path to certificate key file. " + "Relative to current directory",
+ )
+ parser.add_argument(
+ "--path",
+ default=".",
+ help="Path to pelican source directory to serve. "
+ + "Relative to current directory",
)
- parser.add_argument("port", default=8000, type=int, nargs="?",
- help="Port to Listen On")
- parser.add_argument("server", default="", nargs="?",
- help="Interface to Listen On")
- parser.add_argument('--ssl', action="store_true",
- help='Activate SSL listener')
- parser.add_argument('--cert', default="./cert.pem", nargs="?",
- help='Path to certificate file. ' +
- 'Relative to current directory')
- parser.add_argument('--key', default="./key.pem", nargs="?",
- help='Path to certificate key file. ' +
- 'Relative to current directory')
- parser.add_argument('--path', default=".",
- help='Path to pelican source directory to serve. ' +
- 'Relative to current directory')
return parser.parse_args()
class ComplexHTTPRequestHandler(server.SimpleHTTPRequestHandler):
- SUFFIXES = ['.html', '/index.html', '/', '']
+ SUFFIXES = [".html", "/index.html", "/", ""]
extensions_map = {
**server.SimpleHTTPRequestHandler.extensions_map,
- ** {
+ **{
# web fonts
".oft": "font/oft",
".sfnt": "font/sfnt",
@@ -57,13 +66,13 @@ class ComplexHTTPRequestHandler(server.SimpleHTTPRequestHandler):
def translate_path(self, path):
# abandon query parameters
- path = path.split('?', 1)[0]
- path = path.split('#', 1)[0]
+ path = path.split("?", 1)[0]
+ path = path.split("#", 1)[0]
# Don't forget explicit trailing slash when normalizing. Issue17324
- trailing_slash = path.rstrip().endswith('/')
+ trailing_slash = path.rstrip().endswith("/")
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
- words = path.split('/')
+ words = path.split("/")
words = filter(None, words)
path = self.base_path
for word in words:
@@ -72,12 +81,12 @@ def translate_path(self, path):
continue
path = os.path.join(path, word)
if trailing_slash:
- path += '/'
+ path += "/"
return path
def do_GET(self):
# cut off a query string
- original_path = self.path.split('?', 1)[0]
+ original_path = self.path.split("?", 1)[0]
# try to find file
self.path = self.get_path_that_exists(original_path)
@@ -88,12 +97,12 @@ def do_GET(self):
def get_path_that_exists(self, original_path):
# Try to strip trailing slash
- trailing_slash = original_path.endswith('/')
- original_path = original_path.rstrip('/')
+ trailing_slash = original_path.endswith("/")
+ original_path = original_path.rstrip("/")
# Try to detect file by applying various suffixes
tries = []
for suffix in self.SUFFIXES:
- if not trailing_slash and suffix == '/':
+ if not trailing_slash and suffix == "/":
# if original request does not have trailing slash, skip the '/' suffix
# so that base class can redirect if needed
continue
@@ -101,18 +110,17 @@ def get_path_that_exists(self, original_path):
if os.path.exists(self.translate_path(path)):
return path
tries.append(path)
- logger.warning("Unable to find `%s` or variations:\n%s",
- original_path,
- '\n'.join(tries))
+ logger.warning(
+ "Unable to find `%s` or variations:\n%s", original_path, "\n".join(tries)
+ )
return None
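# Illustrative sketch, not from the patch: the order in which candidate paths
# are tried for a made-up request without a trailing slash. The "/" suffix is
# skipped in that case so the base class can still issue a redirect.
SUFFIXES = [".html", "/index.html", "/", ""]
original_path = "/blog/my-first-post"
candidates = [original_path + s for s in SUFFIXES if s != "/"]
print(candidates)
# ['/blog/my-first-post.html', '/blog/my-first-post/index.html',
#  '/blog/my-first-post']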
def guess_type(self, path):
- """Guess at the mime type for the specified file.
- """
+ """Guess at the mime type for the specified file."""
mimetype = server.SimpleHTTPRequestHandler.guess_type(self, path)
# If the default guess is too generic, try the python-magic library
- if mimetype == 'application/octet-stream' and magic_from_file:
+ if mimetype == "application/octet-stream" and magic_from_file:
mimetype = magic_from_file(path, mime=True)
return mimetype
@@ -127,31 +135,33 @@ def __init__(self, base_path, *args, **kwargs):
self.RequestHandlerClass.base_path = base_path
-if __name__ == '__main__':
+if __name__ == "__main__":
init_logging(level=logging.INFO)
- logger.warning("'python -m pelican.server' is deprecated.\nThe "
- "Pelican development server should be run via "
- "'pelican --listen' or 'pelican -l'.\nThis can be combined "
- "with regeneration as 'pelican -lr'.\nRerun 'pelican-"
- "quickstart' to get new Makefile and tasks.py files.")
+ logger.warning(
+ "'python -m pelican.server' is deprecated.\nThe "
+ "Pelican development server should be run via "
+ "'pelican --listen' or 'pelican -l'.\nThis can be combined "
+ "with regeneration as 'pelican -lr'.\nRerun 'pelican-"
+ "quickstart' to get new Makefile and tasks.py files."
+ )
args = parse_arguments()
RootedHTTPServer.allow_reuse_address = True
try:
httpd = RootedHTTPServer(
- args.path, (args.server, args.port), ComplexHTTPRequestHandler)
+ args.path, (args.server, args.port), ComplexHTTPRequestHandler
+ )
if args.ssl:
httpd.socket = ssl.wrap_socket(
- httpd.socket, keyfile=args.key,
- certfile=args.cert, server_side=True)
+ httpd.socket, keyfile=args.key, certfile=args.cert, server_side=True
+ )
except ssl.SSLError as e:
- logger.error("Couldn't open certificate file %s or key file %s",
- args.cert, args.key)
- logger.error("Could not listen on port %s, server %s.",
- args.port, args.server)
- sys.exit(getattr(e, 'exitcode', 1))
-
- logger.info("Serving at port %s, server %s.",
- args.port, args.server)
+ logger.error(
+ "Couldn't open certificate file %s or key file %s", args.cert, args.key
+ )
+ logger.error("Could not listen on port %s, server %s.", args.port, args.server)
+ sys.exit(getattr(e, "exitcode", 1))
+
+ logger.info("Serving at port %s, server %s.", args.port, args.server)
try:
httpd.serve_forever()
except KeyboardInterrupt:
diff --git a/pelican/settings.py b/pelican/settings.py
--- a/pelican/settings.py
+++ b/pelican/settings.py
@@ -22,150 +22,157 @@ def load_source(name, path):
logger = logging.getLogger(__name__)
-DEFAULT_THEME = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- 'themes', 'notmyidea')
+DEFAULT_THEME = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "themes", "notmyidea"
+)
DEFAULT_CONFIG = {
- 'PATH': os.curdir,
- 'ARTICLE_PATHS': [''],
- 'ARTICLE_EXCLUDES': [],
- 'PAGE_PATHS': ['pages'],
- 'PAGE_EXCLUDES': [],
- 'THEME': DEFAULT_THEME,
- 'OUTPUT_PATH': 'output',
- 'READERS': {},
- 'STATIC_PATHS': ['images'],
- 'STATIC_EXCLUDES': [],
- 'STATIC_EXCLUDE_SOURCES': True,
- 'THEME_STATIC_DIR': 'theme',
- 'THEME_STATIC_PATHS': ['static', ],
- 'FEED_ALL_ATOM': 'feeds/all.atom.xml',
- 'CATEGORY_FEED_ATOM': 'feeds/{slug}.atom.xml',
- 'AUTHOR_FEED_ATOM': 'feeds/{slug}.atom.xml',
- 'AUTHOR_FEED_RSS': 'feeds/{slug}.rss.xml',
- 'TRANSLATION_FEED_ATOM': 'feeds/all-{lang}.atom.xml',
- 'FEED_MAX_ITEMS': 100,
- 'RSS_FEED_SUMMARY_ONLY': True,
- 'SITEURL': '',
- 'SITENAME': 'A Pelican Blog',
- 'DISPLAY_PAGES_ON_MENU': True,
- 'DISPLAY_CATEGORIES_ON_MENU': True,
- 'DOCUTILS_SETTINGS': {},
- 'OUTPUT_SOURCES': False,
- 'OUTPUT_SOURCES_EXTENSION': '.text',
- 'USE_FOLDER_AS_CATEGORY': True,
- 'DEFAULT_CATEGORY': 'misc',
- 'WITH_FUTURE_DATES': True,
- 'CSS_FILE': 'main.css',
- 'NEWEST_FIRST_ARCHIVES': True,
- 'REVERSE_CATEGORY_ORDER': False,
- 'DELETE_OUTPUT_DIRECTORY': False,
- 'OUTPUT_RETENTION': [],
- 'INDEX_SAVE_AS': 'index.html',
- 'ARTICLE_URL': '{slug}.html',
- 'ARTICLE_SAVE_AS': '{slug}.html',
- 'ARTICLE_ORDER_BY': 'reversed-date',
- 'ARTICLE_LANG_URL': '{slug}-{lang}.html',
- 'ARTICLE_LANG_SAVE_AS': '{slug}-{lang}.html',
- 'DRAFT_URL': 'drafts/{slug}.html',
- 'DRAFT_SAVE_AS': 'drafts/{slug}.html',
- 'DRAFT_LANG_URL': 'drafts/{slug}-{lang}.html',
- 'DRAFT_LANG_SAVE_AS': 'drafts/{slug}-{lang}.html',
- 'PAGE_URL': 'pages/{slug}.html',
- 'PAGE_SAVE_AS': 'pages/{slug}.html',
- 'PAGE_ORDER_BY': 'basename',
- 'PAGE_LANG_URL': 'pages/{slug}-{lang}.html',
- 'PAGE_LANG_SAVE_AS': 'pages/{slug}-{lang}.html',
- 'DRAFT_PAGE_URL': 'drafts/pages/{slug}.html',
- 'DRAFT_PAGE_SAVE_AS': 'drafts/pages/{slug}.html',
- 'DRAFT_PAGE_LANG_URL': 'drafts/pages/{slug}-{lang}.html',
- 'DRAFT_PAGE_LANG_SAVE_AS': 'drafts/pages/{slug}-{lang}.html',
- 'STATIC_URL': '{path}',
- 'STATIC_SAVE_AS': '{path}',
- 'STATIC_CREATE_LINKS': False,
- 'STATIC_CHECK_IF_MODIFIED': False,
- 'CATEGORY_URL': 'category/{slug}.html',
- 'CATEGORY_SAVE_AS': 'category/{slug}.html',
- 'TAG_URL': 'tag/{slug}.html',
- 'TAG_SAVE_AS': 'tag/{slug}.html',
- 'AUTHOR_URL': 'author/{slug}.html',
- 'AUTHOR_SAVE_AS': 'author/{slug}.html',
- 'PAGINATION_PATTERNS': [
- (1, '{name}{extension}', '{name}{extension}'),
- (2, '{name}{number}{extension}', '{name}{number}{extension}'),
+ "PATH": os.curdir,
+ "ARTICLE_PATHS": [""],
+ "ARTICLE_EXCLUDES": [],
+ "PAGE_PATHS": ["pages"],
+ "PAGE_EXCLUDES": [],
+ "THEME": DEFAULT_THEME,
+ "OUTPUT_PATH": "output",
+ "READERS": {},
+ "STATIC_PATHS": ["images"],
+ "STATIC_EXCLUDES": [],
+ "STATIC_EXCLUDE_SOURCES": True,
+ "THEME_STATIC_DIR": "theme",
+ "THEME_STATIC_PATHS": [
+ "static",
],
- 'YEAR_ARCHIVE_URL': '',
- 'YEAR_ARCHIVE_SAVE_AS': '',
- 'MONTH_ARCHIVE_URL': '',
- 'MONTH_ARCHIVE_SAVE_AS': '',
- 'DAY_ARCHIVE_URL': '',
- 'DAY_ARCHIVE_SAVE_AS': '',
- 'RELATIVE_URLS': False,
- 'DEFAULT_LANG': 'en',
- 'ARTICLE_TRANSLATION_ID': 'slug',
- 'PAGE_TRANSLATION_ID': 'slug',
- 'DIRECT_TEMPLATES': ['index', 'tags', 'categories', 'authors', 'archives'],
- 'THEME_TEMPLATES_OVERRIDES': [],
- 'PAGINATED_TEMPLATES': {'index': None, 'tag': None, 'category': None,
- 'author': None},
- 'PELICAN_CLASS': 'pelican.Pelican',
- 'DEFAULT_DATE_FORMAT': '%a %d %B %Y',
- 'DATE_FORMATS': {},
- 'MARKDOWN': {
- 'extension_configs': {
- 'markdown.extensions.codehilite': {'css_class': 'highlight'},
- 'markdown.extensions.extra': {},
- 'markdown.extensions.meta': {},
+ "FEED_ALL_ATOM": "feeds/all.atom.xml",
+ "CATEGORY_FEED_ATOM": "feeds/{slug}.atom.xml",
+ "AUTHOR_FEED_ATOM": "feeds/{slug}.atom.xml",
+ "AUTHOR_FEED_RSS": "feeds/{slug}.rss.xml",
+ "TRANSLATION_FEED_ATOM": "feeds/all-{lang}.atom.xml",
+ "FEED_MAX_ITEMS": 100,
+ "RSS_FEED_SUMMARY_ONLY": True,
+ "SITEURL": "",
+ "SITENAME": "A Pelican Blog",
+ "DISPLAY_PAGES_ON_MENU": True,
+ "DISPLAY_CATEGORIES_ON_MENU": True,
+ "DOCUTILS_SETTINGS": {},
+ "OUTPUT_SOURCES": False,
+ "OUTPUT_SOURCES_EXTENSION": ".text",
+ "USE_FOLDER_AS_CATEGORY": True,
+ "DEFAULT_CATEGORY": "misc",
+ "WITH_FUTURE_DATES": True,
+ "CSS_FILE": "main.css",
+ "NEWEST_FIRST_ARCHIVES": True,
+ "REVERSE_CATEGORY_ORDER": False,
+ "DELETE_OUTPUT_DIRECTORY": False,
+ "OUTPUT_RETENTION": [],
+ "INDEX_SAVE_AS": "index.html",
+ "ARTICLE_URL": "{slug}.html",
+ "ARTICLE_SAVE_AS": "{slug}.html",
+ "ARTICLE_ORDER_BY": "reversed-date",
+ "ARTICLE_LANG_URL": "{slug}-{lang}.html",
+ "ARTICLE_LANG_SAVE_AS": "{slug}-{lang}.html",
+ "DRAFT_URL": "drafts/{slug}.html",
+ "DRAFT_SAVE_AS": "drafts/{slug}.html",
+ "DRAFT_LANG_URL": "drafts/{slug}-{lang}.html",
+ "DRAFT_LANG_SAVE_AS": "drafts/{slug}-{lang}.html",
+ "PAGE_URL": "pages/{slug}.html",
+ "PAGE_SAVE_AS": "pages/{slug}.html",
+ "PAGE_ORDER_BY": "basename",
+ "PAGE_LANG_URL": "pages/{slug}-{lang}.html",
+ "PAGE_LANG_SAVE_AS": "pages/{slug}-{lang}.html",
+ "DRAFT_PAGE_URL": "drafts/pages/{slug}.html",
+ "DRAFT_PAGE_SAVE_AS": "drafts/pages/{slug}.html",
+ "DRAFT_PAGE_LANG_URL": "drafts/pages/{slug}-{lang}.html",
+ "DRAFT_PAGE_LANG_SAVE_AS": "drafts/pages/{slug}-{lang}.html",
+ "STATIC_URL": "{path}",
+ "STATIC_SAVE_AS": "{path}",
+ "STATIC_CREATE_LINKS": False,
+ "STATIC_CHECK_IF_MODIFIED": False,
+ "CATEGORY_URL": "category/{slug}.html",
+ "CATEGORY_SAVE_AS": "category/{slug}.html",
+ "TAG_URL": "tag/{slug}.html",
+ "TAG_SAVE_AS": "tag/{slug}.html",
+ "AUTHOR_URL": "author/{slug}.html",
+ "AUTHOR_SAVE_AS": "author/{slug}.html",
+ "PAGINATION_PATTERNS": [
+ (1, "{name}{extension}", "{name}{extension}"),
+ (2, "{name}{number}{extension}", "{name}{number}{extension}"),
+ ],
+ "YEAR_ARCHIVE_URL": "",
+ "YEAR_ARCHIVE_SAVE_AS": "",
+ "MONTH_ARCHIVE_URL": "",
+ "MONTH_ARCHIVE_SAVE_AS": "",
+ "DAY_ARCHIVE_URL": "",
+ "DAY_ARCHIVE_SAVE_AS": "",
+ "RELATIVE_URLS": False,
+ "DEFAULT_LANG": "en",
+ "ARTICLE_TRANSLATION_ID": "slug",
+ "PAGE_TRANSLATION_ID": "slug",
+ "DIRECT_TEMPLATES": ["index", "tags", "categories", "authors", "archives"],
+ "THEME_TEMPLATES_OVERRIDES": [],
+ "PAGINATED_TEMPLATES": {
+ "index": None,
+ "tag": None,
+ "category": None,
+ "author": None,
+ },
+ "PELICAN_CLASS": "pelican.Pelican",
+ "DEFAULT_DATE_FORMAT": "%a %d %B %Y",
+ "DATE_FORMATS": {},
+ "MARKDOWN": {
+ "extension_configs": {
+ "markdown.extensions.codehilite": {"css_class": "highlight"},
+ "markdown.extensions.extra": {},
+ "markdown.extensions.meta": {},
},
- 'output_format': 'html5',
+ "output_format": "html5",
},
- 'JINJA_FILTERS': {},
- 'JINJA_GLOBALS': {},
- 'JINJA_TESTS': {},
- 'JINJA_ENVIRONMENT': {
- 'trim_blocks': True,
- 'lstrip_blocks': True,
- 'extensions': [],
+ "JINJA_FILTERS": {},
+ "JINJA_GLOBALS": {},
+ "JINJA_TESTS": {},
+ "JINJA_ENVIRONMENT": {
+ "trim_blocks": True,
+ "lstrip_blocks": True,
+ "extensions": [],
},
- 'LOG_FILTER': [],
- 'LOCALE': [''], # defaults to user locale
- 'DEFAULT_PAGINATION': False,
- 'DEFAULT_ORPHANS': 0,
- 'DEFAULT_METADATA': {},
- 'FILENAME_METADATA': r'(?P<date>\d{4}-\d{2}-\d{2}).*',
- 'PATH_METADATA': '',
- 'EXTRA_PATH_METADATA': {},
- 'ARTICLE_PERMALINK_STRUCTURE': '',
- 'TYPOGRIFY': False,
- 'TYPOGRIFY_IGNORE_TAGS': [],
- 'TYPOGRIFY_DASHES': 'default',
- 'SUMMARY_END_SUFFIX': '…',
- 'SUMMARY_MAX_LENGTH': 50,
- 'PLUGIN_PATHS': [],
- 'PLUGINS': None,
- 'PYGMENTS_RST_OPTIONS': {},
- 'TEMPLATE_PAGES': {},
- 'TEMPLATE_EXTENSIONS': ['.html'],
- 'IGNORE_FILES': ['.#*'],
- 'SLUG_REGEX_SUBSTITUTIONS': [
- (r'[^\w\s-]', ''), # remove non-alphabetical/whitespace/'-' chars
- (r'(?u)\A\s*', ''), # strip leading whitespace
- (r'(?u)\s*\Z', ''), # strip trailing whitespace
- (r'[-\s]+', '-'), # reduce multiple whitespace or '-' to single '-'
+ "LOG_FILTER": [],
+ "LOCALE": [""], # defaults to user locale
+ "DEFAULT_PAGINATION": False,
+ "DEFAULT_ORPHANS": 0,
+ "DEFAULT_METADATA": {},
+ "FILENAME_METADATA": r"(?P<date>\d{4}-\d{2}-\d{2}).*",
+ "PATH_METADATA": "",
+ "EXTRA_PATH_METADATA": {},
+ "ARTICLE_PERMALINK_STRUCTURE": "",
+ "TYPOGRIFY": False,
+ "TYPOGRIFY_IGNORE_TAGS": [],
+ "TYPOGRIFY_DASHES": "default",
+ "SUMMARY_END_SUFFIX": "…",
+ "SUMMARY_MAX_LENGTH": 50,
+ "PLUGIN_PATHS": [],
+ "PLUGINS": None,
+ "PYGMENTS_RST_OPTIONS": {},
+ "TEMPLATE_PAGES": {},
+ "TEMPLATE_EXTENSIONS": [".html"],
+ "IGNORE_FILES": [".#*"],
+ "SLUG_REGEX_SUBSTITUTIONS": [
+ (r"[^\w\s-]", ""), # remove non-alphabetical/whitespace/'-' chars
+ (r"(?u)\A\s*", ""), # strip leading whitespace
+ (r"(?u)\s*\Z", ""), # strip trailing whitespace
+ (r"[-\s]+", "-"), # reduce multiple whitespace or '-' to single '-'
],
- 'INTRASITE_LINK_REGEX': '[{|](?P<what>.*?)[|}]',
- 'SLUGIFY_SOURCE': 'title',
- 'SLUGIFY_USE_UNICODE': False,
- 'SLUGIFY_PRESERVE_CASE': False,
- 'CACHE_CONTENT': False,
- 'CONTENT_CACHING_LAYER': 'reader',
- 'CACHE_PATH': 'cache',
- 'GZIP_CACHE': True,
- 'CHECK_MODIFIED_METHOD': 'mtime',
- 'LOAD_CONTENT_CACHE': False,
- 'WRITE_SELECTED': [],
- 'FORMATTED_FIELDS': ['summary'],
- 'PORT': 8000,
- 'BIND': '127.0.0.1',
+ "INTRASITE_LINK_REGEX": "[{|](?P<what>.*?)[|}]",
+ "SLUGIFY_SOURCE": "title",
+ "SLUGIFY_USE_UNICODE": False,
+ "SLUGIFY_PRESERVE_CASE": False,
+ "CACHE_CONTENT": False,
+ "CONTENT_CACHING_LAYER": "reader",
+ "CACHE_PATH": "cache",
+ "GZIP_CACHE": True,
+ "CHECK_MODIFIED_METHOD": "mtime",
+ "LOAD_CONTENT_CACHE": False,
+ "WRITE_SELECTED": [],
+ "FORMATTED_FIELDS": ["summary"],
+ "PORT": 8000,
+ "BIND": "127.0.0.1",
}
PYGMENTS_RST_OPTIONS = None
@@ -185,20 +192,23 @@ def read_settings(path=None, override=None):
def getabs(maybe_relative, base_path=path):
if isabs(maybe_relative):
return maybe_relative
- return os.path.abspath(os.path.normpath(os.path.join(
- os.path.dirname(base_path), maybe_relative)))
+ return os.path.abspath(
+ os.path.normpath(
+ os.path.join(os.path.dirname(base_path), maybe_relative)
+ )
+ )
- for p in ['PATH', 'OUTPUT_PATH', 'THEME', 'CACHE_PATH']:
+ for p in ["PATH", "OUTPUT_PATH", "THEME", "CACHE_PATH"]:
if settings.get(p) is not None:
absp = getabs(settings[p])
# THEME may be a name rather than a path
- if p != 'THEME' or os.path.exists(absp):
+ if p != "THEME" or os.path.exists(absp):
settings[p] = absp
- if settings.get('PLUGIN_PATHS') is not None:
- settings['PLUGIN_PATHS'] = [getabs(pluginpath)
- for pluginpath
- in settings['PLUGIN_PATHS']]
+ if settings.get("PLUGIN_PATHS") is not None:
+ settings["PLUGIN_PATHS"] = [
+ getabs(pluginpath) for pluginpath in settings["PLUGIN_PATHS"]
+ ]
settings = dict(copy.deepcopy(DEFAULT_CONFIG), **settings)
settings = configure_settings(settings)
@@ -208,7 +218,7 @@ def getabs(maybe_relative, base_path=path):
# variable here that we'll import from within Pygments.run (see
# rstdirectives.py) to see what the user defaults were.
global PYGMENTS_RST_OPTIONS
- PYGMENTS_RST_OPTIONS = settings.get('PYGMENTS_RST_OPTIONS', None)
+ PYGMENTS_RST_OPTIONS = settings.get("PYGMENTS_RST_OPTIONS", None)
return settings
@@ -217,8 +227,7 @@ def get_settings_from_module(module=None):
context = {}
if module is not None:
- context.update(
- (k, v) for k, v in inspect.getmembers(module) if k.isupper())
+ context.update((k, v) for k, v in inspect.getmembers(module) if k.isupper())
return context
@@ -233,11 +242,12 @@ def get_settings_from_file(path):
def get_jinja_environment(settings):
"""Sets the environment for Jinja"""
- jinja_env = settings.setdefault('JINJA_ENVIRONMENT',
- DEFAULT_CONFIG['JINJA_ENVIRONMENT'])
+ jinja_env = settings.setdefault(
+ "JINJA_ENVIRONMENT", DEFAULT_CONFIG["JINJA_ENVIRONMENT"]
+ )
# Make sure we include the defaults if the user has set env variables
- for key, value in DEFAULT_CONFIG['JINJA_ENVIRONMENT'].items():
+ for key, value in DEFAULT_CONFIG["JINJA_ENVIRONMENT"].items():
if key not in jinja_env:
jinja_env[key] = value
@@ -248,14 +258,14 @@ def _printf_s_to_format_field(printf_string, format_field):
"""Tries to replace %s with {format_field} in the provided printf_string.
Raises ValueError in case of failure.
"""
- TEST_STRING = 'PELICAN_PRINTF_S_DEPRECATION'
+ TEST_STRING = "PELICAN_PRINTF_S_DEPRECATION"
expected = printf_string % TEST_STRING
- result = printf_string.replace('{', '{{').replace('}', '}}') \
- % '{{{}}}'.format(format_field)
+ result = printf_string.replace("{", "{{").replace("}", "}}") % "{{{}}}".format(
+ format_field
+ )
if result.format(**{format_field: TEST_STRING}) != expected:
- raise ValueError('Failed to safely replace %s with {{{}}}'.format(
- format_field))
+ raise ValueError("Failed to safely replace %s with {{{}}}".format(format_field))
return result
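# Illustrative sketch, not from the patch: what the helper above computes for
# a made-up legacy feed path. Braces are doubled so a later str.format() call
# leaves them intact, then '%s' is swapped for the named field.
legacy = "feeds/%s.atom.xml"
converted = legacy.replace("{", "{{").replace("}", "}}") % "{slug}"
assert converted == "feeds/{slug}.atom.xml"
assert converted.format(slug="pelican") == "feeds/pelican.atom.xml"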
@@ -266,115 +276,140 @@ def handle_deprecated_settings(settings):
"""
# PLUGIN_PATH -> PLUGIN_PATHS
- if 'PLUGIN_PATH' in settings:
- logger.warning('PLUGIN_PATH setting has been replaced by '
- 'PLUGIN_PATHS, moving it to the new setting name.')
- settings['PLUGIN_PATHS'] = settings['PLUGIN_PATH']
- del settings['PLUGIN_PATH']
+ if "PLUGIN_PATH" in settings:
+ logger.warning(
+ "PLUGIN_PATH setting has been replaced by "
+ "PLUGIN_PATHS, moving it to the new setting name."
+ )
+ settings["PLUGIN_PATHS"] = settings["PLUGIN_PATH"]
+ del settings["PLUGIN_PATH"]
# PLUGIN_PATHS: str -> [str]
- if isinstance(settings.get('PLUGIN_PATHS'), str):
- logger.warning("Defining PLUGIN_PATHS setting as string "
- "has been deprecated (should be a list)")
- settings['PLUGIN_PATHS'] = [settings['PLUGIN_PATHS']]
+ if isinstance(settings.get("PLUGIN_PATHS"), str):
+ logger.warning(
+ "Defining PLUGIN_PATHS setting as string "
+ "has been deprecated (should be a list)"
+ )
+ settings["PLUGIN_PATHS"] = [settings["PLUGIN_PATHS"]]
# JINJA_EXTENSIONS -> JINJA_ENVIRONMENT > extensions
- if 'JINJA_EXTENSIONS' in settings:
- logger.warning('JINJA_EXTENSIONS setting has been deprecated, '
- 'moving it to JINJA_ENVIRONMENT setting.')
- settings['JINJA_ENVIRONMENT']['extensions'] = \
- settings['JINJA_EXTENSIONS']
- del settings['JINJA_EXTENSIONS']
+ if "JINJA_EXTENSIONS" in settings:
+ logger.warning(
+ "JINJA_EXTENSIONS setting has been deprecated, "
+ "moving it to JINJA_ENVIRONMENT setting."
+ )
+ settings["JINJA_ENVIRONMENT"]["extensions"] = settings["JINJA_EXTENSIONS"]
+ del settings["JINJA_EXTENSIONS"]
# {ARTICLE,PAGE}_DIR -> {ARTICLE,PAGE}_PATHS
- for key in ['ARTICLE', 'PAGE']:
- old_key = key + '_DIR'
- new_key = key + '_PATHS'
+ for key in ["ARTICLE", "PAGE"]:
+ old_key = key + "_DIR"
+ new_key = key + "_PATHS"
if old_key in settings:
logger.warning(
- 'Deprecated setting %s, moving it to %s list',
- old_key, new_key)
- settings[new_key] = [settings[old_key]] # also make a list
+ "Deprecated setting %s, moving it to %s list", old_key, new_key
+ )
+ settings[new_key] = [settings[old_key]] # also make a list
del settings[old_key]
# EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES
- if 'EXTRA_TEMPLATES_PATHS' in settings:
- logger.warning('EXTRA_TEMPLATES_PATHS is deprecated use '
- 'THEME_TEMPLATES_OVERRIDES instead.')
- if ('THEME_TEMPLATES_OVERRIDES' in settings and
- settings['THEME_TEMPLATES_OVERRIDES']):
+ if "EXTRA_TEMPLATES_PATHS" in settings:
+ logger.warning(
+ "EXTRA_TEMPLATES_PATHS is deprecated use "
+ "THEME_TEMPLATES_OVERRIDES instead."
+ )
+ if (
+ "THEME_TEMPLATES_OVERRIDES" in settings
+ and settings["THEME_TEMPLATES_OVERRIDES"]
+ ):
raise Exception(
- 'Setting both EXTRA_TEMPLATES_PATHS and '
- 'THEME_TEMPLATES_OVERRIDES is not permitted. Please move to '
- 'only setting THEME_TEMPLATES_OVERRIDES.')
- settings['THEME_TEMPLATES_OVERRIDES'] = \
- settings['EXTRA_TEMPLATES_PATHS']
- del settings['EXTRA_TEMPLATES_PATHS']
+ "Setting both EXTRA_TEMPLATES_PATHS and "
+ "THEME_TEMPLATES_OVERRIDES is not permitted. Please move to "
+ "only setting THEME_TEMPLATES_OVERRIDES."
+ )
+ settings["THEME_TEMPLATES_OVERRIDES"] = settings["EXTRA_TEMPLATES_PATHS"]
+ del settings["EXTRA_TEMPLATES_PATHS"]
# MD_EXTENSIONS -> MARKDOWN
- if 'MD_EXTENSIONS' in settings:
- logger.warning('MD_EXTENSIONS is deprecated use MARKDOWN '
- 'instead. Falling back to the default.')
- settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']
+ if "MD_EXTENSIONS" in settings:
+ logger.warning(
+ "MD_EXTENSIONS is deprecated use MARKDOWN "
+ "instead. Falling back to the default."
+ )
+ settings["MARKDOWN"] = DEFAULT_CONFIG["MARKDOWN"]
# LESS_GENERATOR -> Webassets plugin
# FILES_TO_COPY -> STATIC_PATHS, EXTRA_PATH_METADATA
for old, new, doc in [
- ('LESS_GENERATOR', 'the Webassets plugin', None),
- ('FILES_TO_COPY', 'STATIC_PATHS and EXTRA_PATH_METADATA',
- 'https://github.com/getpelican/pelican/'
- 'blob/master/docs/settings.rst#path-metadata'),
+ ("LESS_GENERATOR", "the Webassets plugin", None),
+ (
+ "FILES_TO_COPY",
+ "STATIC_PATHS and EXTRA_PATH_METADATA",
+ "https://github.com/getpelican/pelican/"
+ "blob/master/docs/settings.rst#path-metadata",
+ ),
]:
if old in settings:
- message = 'The {} setting has been removed in favor of {}'.format(
- old, new)
+ message = "The {} setting has been removed in favor of {}".format(old, new)
if doc:
- message += ', see {} for details'.format(doc)
+ message += ", see {} for details".format(doc)
logger.warning(message)
# PAGINATED_DIRECT_TEMPLATES -> PAGINATED_TEMPLATES
- if 'PAGINATED_DIRECT_TEMPLATES' in settings:
- message = 'The {} setting has been removed in favor of {}'.format(
- 'PAGINATED_DIRECT_TEMPLATES', 'PAGINATED_TEMPLATES')
+ if "PAGINATED_DIRECT_TEMPLATES" in settings:
+ message = "The {} setting has been removed in favor of {}".format(
+ "PAGINATED_DIRECT_TEMPLATES", "PAGINATED_TEMPLATES"
+ )
logger.warning(message)
# set PAGINATED_TEMPLATES
- if 'PAGINATED_TEMPLATES' not in settings:
- settings['PAGINATED_TEMPLATES'] = {
- 'tag': None, 'category': None, 'author': None}
-
- for t in settings['PAGINATED_DIRECT_TEMPLATES']:
- if t not in settings['PAGINATED_TEMPLATES']:
- settings['PAGINATED_TEMPLATES'][t] = None
- del settings['PAGINATED_DIRECT_TEMPLATES']
+ if "PAGINATED_TEMPLATES" not in settings:
+ settings["PAGINATED_TEMPLATES"] = {
+ "tag": None,
+ "category": None,
+ "author": None,
+ }
+
+ for t in settings["PAGINATED_DIRECT_TEMPLATES"]:
+ if t not in settings["PAGINATED_TEMPLATES"]:
+ settings["PAGINATED_TEMPLATES"][t] = None
+ del settings["PAGINATED_DIRECT_TEMPLATES"]
# {SLUG,CATEGORY,TAG,AUTHOR}_SUBSTITUTIONS ->
# {SLUG,CATEGORY,TAG,AUTHOR}_REGEX_SUBSTITUTIONS
- url_settings_url = \
- 'http://docs.getpelican.com/en/latest/settings.html#url-settings'
- flavours = {'SLUG', 'CATEGORY', 'TAG', 'AUTHOR'}
- old_values = {f: settings[f + '_SUBSTITUTIONS']
- for f in flavours if f + '_SUBSTITUTIONS' in settings}
- new_values = {f: settings[f + '_REGEX_SUBSTITUTIONS']
- for f in flavours if f + '_REGEX_SUBSTITUTIONS' in settings}
+ url_settings_url = "http://docs.getpelican.com/en/latest/settings.html#url-settings"
+ flavours = {"SLUG", "CATEGORY", "TAG", "AUTHOR"}
+ old_values = {
+ f: settings[f + "_SUBSTITUTIONS"]
+ for f in flavours
+ if f + "_SUBSTITUTIONS" in settings
+ }
+ new_values = {
+ f: settings[f + "_REGEX_SUBSTITUTIONS"]
+ for f in flavours
+ if f + "_REGEX_SUBSTITUTIONS" in settings
+ }
if old_values and new_values:
raise Exception(
- 'Setting both {new_key} and {old_key} (or variants thereof) is '
- 'not permitted. Please move to only setting {new_key}.'
- .format(old_key='SLUG_SUBSTITUTIONS',
- new_key='SLUG_REGEX_SUBSTITUTIONS'))
+ "Setting both {new_key} and {old_key} (or variants thereof) is "
+ "not permitted. Please move to only setting {new_key}.".format(
+ old_key="SLUG_SUBSTITUTIONS", new_key="SLUG_REGEX_SUBSTITUTIONS"
+ )
+ )
if old_values:
- message = ('{} and variants thereof are deprecated and will be '
- 'removed in the future. Please use {} and variants thereof '
- 'instead. Check {}.'
- .format('SLUG_SUBSTITUTIONS', 'SLUG_REGEX_SUBSTITUTIONS',
- url_settings_url))
+ message = (
+ "{} and variants thereof are deprecated and will be "
+ "removed in the future. Please use {} and variants thereof "
+ "instead. Check {}.".format(
+ "SLUG_SUBSTITUTIONS", "SLUG_REGEX_SUBSTITUTIONS", url_settings_url
+ )
+ )
logger.warning(message)
- if old_values.get('SLUG'):
- for f in {'CATEGORY', 'TAG'}:
+ if old_values.get("SLUG"):
+ for f in {"CATEGORY", "TAG"}:
if old_values.get(f):
- old_values[f] = old_values['SLUG'] + old_values[f]
- old_values['AUTHOR'] = old_values.get('AUTHOR', [])
+ old_values[f] = old_values["SLUG"] + old_values[f]
+ old_values["AUTHOR"] = old_values.get("AUTHOR", [])
for f in flavours:
if old_values.get(f) is not None:
regex_subs = []
@@ -387,120 +422,138 @@ def handle_deprecated_settings(settings):
replace = False
except ValueError:
src, dst = tpl
- regex_subs.append(
- (re.escape(src), dst.replace('\\', r'\\')))
+ regex_subs.append((re.escape(src), dst.replace("\\", r"\\")))
if replace:
regex_subs += [
- (r'[^\w\s-]', ''),
- (r'(?u)\A\s*', ''),
- (r'(?u)\s*\Z', ''),
- (r'[-\s]+', '-'),
+ (r"[^\w\s-]", ""),
+ (r"(?u)\A\s*", ""),
+ (r"(?u)\s*\Z", ""),
+ (r"[-\s]+", "-"),
]
else:
regex_subs += [
- (r'(?u)\A\s*', ''),
- (r'(?u)\s*\Z', ''),
+ (r"(?u)\A\s*", ""),
+ (r"(?u)\s*\Z", ""),
]
- settings[f + '_REGEX_SUBSTITUTIONS'] = regex_subs
- settings.pop(f + '_SUBSTITUTIONS', None)
+ settings[f + "_REGEX_SUBSTITUTIONS"] = regex_subs
+ settings.pop(f + "_SUBSTITUTIONS", None)
# `%s` -> '{slug}` or `{lang}` in FEED settings
- for key in ['TRANSLATION_FEED_ATOM',
- 'TRANSLATION_FEED_RSS'
- ]:
+ for key in ["TRANSLATION_FEED_ATOM", "TRANSLATION_FEED_RSS"]:
if (
- settings.get(key) and not isinstance(settings[key], Path)
- and '%s' in settings[key]
+ settings.get(key)
+ and not isinstance(settings[key], Path)
+ and "%s" in settings[key]
):
- logger.warning('%%s usage in %s is deprecated, use {lang} '
- 'instead.', key)
+ logger.warning("%%s usage in %s is deprecated, use {lang} " "instead.", key)
try:
- settings[key] = _printf_s_to_format_field(
- settings[key], 'lang')
+ settings[key] = _printf_s_to_format_field(settings[key], "lang")
except ValueError:
- logger.warning('Failed to convert %%s to {lang} for %s. '
- 'Falling back to default.', key)
+ logger.warning(
+ "Failed to convert %%s to {lang} for %s. "
+ "Falling back to default.",
+ key,
+ )
settings[key] = DEFAULT_CONFIG[key]
- for key in ['AUTHOR_FEED_ATOM',
- 'AUTHOR_FEED_RSS',
- 'CATEGORY_FEED_ATOM',
- 'CATEGORY_FEED_RSS',
- 'TAG_FEED_ATOM',
- 'TAG_FEED_RSS',
- ]:
+ for key in [
+ "AUTHOR_FEED_ATOM",
+ "AUTHOR_FEED_RSS",
+ "CATEGORY_FEED_ATOM",
+ "CATEGORY_FEED_RSS",
+ "TAG_FEED_ATOM",
+ "TAG_FEED_RSS",
+ ]:
if (
- settings.get(key) and not isinstance(settings[key], Path)
- and '%s' in settings[key]
+ settings.get(key)
+ and not isinstance(settings[key], Path)
+ and "%s" in settings[key]
):
- logger.warning('%%s usage in %s is deprecated, use {slug} '
- 'instead.', key)
+ logger.warning("%%s usage in %s is deprecated, use {slug} " "instead.", key)
try:
- settings[key] = _printf_s_to_format_field(
- settings[key], 'slug')
+ settings[key] = _printf_s_to_format_field(settings[key], "slug")
except ValueError:
- logger.warning('Failed to convert %%s to {slug} for %s. '
- 'Falling back to default.', key)
+ logger.warning(
+ "Failed to convert %%s to {slug} for %s. "
+ "Falling back to default.",
+ key,
+ )
settings[key] = DEFAULT_CONFIG[key]
# CLEAN_URLS
- if settings.get('CLEAN_URLS', False):
- logger.warning('Found deprecated `CLEAN_URLS` in settings.'
- ' Modifying the following settings for the'
- ' same behaviour.')
-
- settings['ARTICLE_URL'] = '{slug}/'
- settings['ARTICLE_LANG_URL'] = '{slug}-{lang}/'
- settings['PAGE_URL'] = 'pages/{slug}/'
- settings['PAGE_LANG_URL'] = 'pages/{slug}-{lang}/'
-
- for setting in ('ARTICLE_URL', 'ARTICLE_LANG_URL', 'PAGE_URL',
- 'PAGE_LANG_URL'):
+ if settings.get("CLEAN_URLS", False):
+ logger.warning(
+ "Found deprecated `CLEAN_URLS` in settings."
+ " Modifying the following settings for the"
+ " same behaviour."
+ )
+
+ settings["ARTICLE_URL"] = "{slug}/"
+ settings["ARTICLE_LANG_URL"] = "{slug}-{lang}/"
+ settings["PAGE_URL"] = "pages/{slug}/"
+ settings["PAGE_LANG_URL"] = "pages/{slug}-{lang}/"
+
+ for setting in ("ARTICLE_URL", "ARTICLE_LANG_URL", "PAGE_URL", "PAGE_LANG_URL"):
logger.warning("%s = '%s'", setting, settings[setting])
# AUTORELOAD_IGNORE_CACHE -> --ignore-cache
- if settings.get('AUTORELOAD_IGNORE_CACHE'):
- logger.warning('Found deprecated `AUTORELOAD_IGNORE_CACHE` in '
- 'settings. Use --ignore-cache instead.')
- settings.pop('AUTORELOAD_IGNORE_CACHE')
+ if settings.get("AUTORELOAD_IGNORE_CACHE"):
+ logger.warning(
+ "Found deprecated `AUTORELOAD_IGNORE_CACHE` in "
+ "settings. Use --ignore-cache instead."
+ )
+ settings.pop("AUTORELOAD_IGNORE_CACHE")
# ARTICLE_PERMALINK_STRUCTURE
- if settings.get('ARTICLE_PERMALINK_STRUCTURE', False):
- logger.warning('Found deprecated `ARTICLE_PERMALINK_STRUCTURE` in'
- ' settings. Modifying the following settings for'
- ' the same behaviour.')
+ if settings.get("ARTICLE_PERMALINK_STRUCTURE", False):
+ logger.warning(
+ "Found deprecated `ARTICLE_PERMALINK_STRUCTURE` in"
+ " settings. Modifying the following settings for"
+ " the same behaviour."
+ )
- structure = settings['ARTICLE_PERMALINK_STRUCTURE']
+ structure = settings["ARTICLE_PERMALINK_STRUCTURE"]
# Convert %(variable) into {variable}.
- structure = re.sub(r'%\((\w+)\)s', r'{\g<1>}', structure)
+ structure = re.sub(r"%\((\w+)\)s", r"{\g<1>}", structure)
# Convert %x into {date:%x} for strftime
- structure = re.sub(r'(%[A-z])', r'{date:\g<1>}', structure)
+ structure = re.sub(r"(%[A-z])", r"{date:\g<1>}", structure)
# Strip a / prefix
- structure = re.sub('^/', '', structure)
-
- for setting in ('ARTICLE_URL', 'ARTICLE_LANG_URL', 'PAGE_URL',
- 'PAGE_LANG_URL', 'DRAFT_URL', 'DRAFT_LANG_URL',
- 'ARTICLE_SAVE_AS', 'ARTICLE_LANG_SAVE_AS',
- 'DRAFT_SAVE_AS', 'DRAFT_LANG_SAVE_AS',
- 'PAGE_SAVE_AS', 'PAGE_LANG_SAVE_AS'):
- settings[setting] = os.path.join(structure,
- settings[setting])
+ structure = re.sub("^/", "", structure)
+
+ for setting in (
+ "ARTICLE_URL",
+ "ARTICLE_LANG_URL",
+ "PAGE_URL",
+ "PAGE_LANG_URL",
+ "DRAFT_URL",
+ "DRAFT_LANG_URL",
+ "ARTICLE_SAVE_AS",
+ "ARTICLE_LANG_SAVE_AS",
+ "DRAFT_SAVE_AS",
+ "DRAFT_LANG_SAVE_AS",
+ "PAGE_SAVE_AS",
+ "PAGE_LANG_SAVE_AS",
+ ):
+ settings[setting] = os.path.join(structure, settings[setting])
logger.warning("%s = '%s'", setting, settings[setting])
# {,TAG,CATEGORY,TRANSLATION}_FEED -> {,TAG,CATEGORY,TRANSLATION}_FEED_ATOM
- for new, old in [('FEED', 'FEED_ATOM'), ('TAG_FEED', 'TAG_FEED_ATOM'),
- ('CATEGORY_FEED', 'CATEGORY_FEED_ATOM'),
- ('TRANSLATION_FEED', 'TRANSLATION_FEED_ATOM')]:
+ for new, old in [
+ ("FEED", "FEED_ATOM"),
+ ("TAG_FEED", "TAG_FEED_ATOM"),
+ ("CATEGORY_FEED", "CATEGORY_FEED_ATOM"),
+ ("TRANSLATION_FEED", "TRANSLATION_FEED_ATOM"),
+ ]:
if settings.get(new, False):
logger.warning(
- 'Found deprecated `%(new)s` in settings. Modify %(new)s '
- 'to %(old)s in your settings and theme for the same '
- 'behavior. Temporarily setting %(old)s for backwards '
- 'compatibility.',
- {'new': new, 'old': old}
+ "Found deprecated `%(new)s` in settings. Modify %(new)s "
+ "to %(old)s in your settings and theme for the same "
+ "behavior. Temporarily setting %(old)s for backwards "
+ "compatibility.",
+ {"new": new, "old": old},
)
settings[old] = settings[new]
@@ -512,34 +565,34 @@ def configure_settings(settings):
settings.
Also, specify the log messages to be ignored.
"""
- if 'PATH' not in settings or not os.path.isdir(settings['PATH']):
- raise Exception('You need to specify a path containing the content'
- ' (see pelican --help for more information)')
+ if "PATH" not in settings or not os.path.isdir(settings["PATH"]):
+ raise Exception(
+ "You need to specify a path containing the content"
+ " (see pelican --help for more information)"
+ )
# specify the log messages to be ignored
- log_filter = settings.get('LOG_FILTER', DEFAULT_CONFIG['LOG_FILTER'])
+ log_filter = settings.get("LOG_FILTER", DEFAULT_CONFIG["LOG_FILTER"])
LimitFilter._ignore.update(set(log_filter))
# lookup the theme in "pelican/themes" if the given one doesn't exist
- if not os.path.isdir(settings['THEME']):
+ if not os.path.isdir(settings["THEME"]):
theme_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)),
- 'themes',
- settings['THEME'])
+ os.path.dirname(os.path.abspath(__file__)), "themes", settings["THEME"]
+ )
if os.path.exists(theme_path):
- settings['THEME'] = theme_path
+ settings["THEME"] = theme_path
else:
- raise Exception("Could not find the theme %s"
- % settings['THEME'])
+ raise Exception("Could not find the theme %s" % settings["THEME"])
# make paths selected for writing absolute if necessary
- settings['WRITE_SELECTED'] = [
- os.path.abspath(path) for path in
- settings.get('WRITE_SELECTED', DEFAULT_CONFIG['WRITE_SELECTED'])
+ settings["WRITE_SELECTED"] = [
+ os.path.abspath(path)
+ for path in settings.get("WRITE_SELECTED", DEFAULT_CONFIG["WRITE_SELECTED"])
]
# standardize strings to lowercase strings
- for key in ['DEFAULT_LANG']:
+ for key in ["DEFAULT_LANG"]:
if key in settings:
settings[key] = settings[key].lower()
@@ -547,24 +600,26 @@ def configure_settings(settings):
settings = get_jinja_environment(settings)
# standardize strings to lists
- for key in ['LOCALE']:
+ for key in ["LOCALE"]:
if key in settings and isinstance(settings[key], str):
settings[key] = [settings[key]]
# check settings that must be a particular type
for key, types in [
- ('OUTPUT_SOURCES_EXTENSION', str),
- ('FILENAME_METADATA', str),
+ ("OUTPUT_SOURCES_EXTENSION", str),
+ ("FILENAME_METADATA", str),
]:
if key in settings and not isinstance(settings[key], types):
value = settings.pop(key)
logger.warn(
- 'Detected misconfigured %s (%s), '
- 'falling back to the default (%s)',
- key, value, DEFAULT_CONFIG[key])
+ "Detected misconfigured %s (%s), " "falling back to the default (%s)",
+ key,
+ value,
+ DEFAULT_CONFIG[key],
+ )
# try to set the different locales, fallback on the default.
- locales = settings.get('LOCALE', DEFAULT_CONFIG['LOCALE'])
+ locales = settings.get("LOCALE", DEFAULT_CONFIG["LOCALE"])
for locale_ in locales:
try:
@@ -575,95 +630,111 @@ def configure_settings(settings):
else:
logger.warning(
"Locale could not be set. Check the LOCALE setting, ensuring it "
- "is valid and available on your system.")
+ "is valid and available on your system."
+ )
- if ('SITEURL' in settings):
+ if "SITEURL" in settings:
# If SITEURL has a trailing slash, remove it and provide a warning
- siteurl = settings['SITEURL']
- if (siteurl.endswith('/')):
- settings['SITEURL'] = siteurl[:-1]
+ siteurl = settings["SITEURL"]
+ if siteurl.endswith("/"):
+ settings["SITEURL"] = siteurl[:-1]
logger.warning("Removed extraneous trailing slash from SITEURL.")
# If SITEURL is defined but FEED_DOMAIN isn't,
# set FEED_DOMAIN to SITEURL
- if 'FEED_DOMAIN' not in settings:
- settings['FEED_DOMAIN'] = settings['SITEURL']
+ if "FEED_DOMAIN" not in settings:
+ settings["FEED_DOMAIN"] = settings["SITEURL"]
# check content caching layer and warn of incompatibilities
- if settings.get('CACHE_CONTENT', False) and \
- settings.get('CONTENT_CACHING_LAYER', '') == 'generator' and \
- not settings.get('WITH_FUTURE_DATES', True):
+ if (
+ settings.get("CACHE_CONTENT", False)
+ and settings.get("CONTENT_CACHING_LAYER", "") == "generator"
+ and not settings.get("WITH_FUTURE_DATES", True)
+ ):
logger.warning(
"WITH_FUTURE_DATES conflicts with CONTENT_CACHING_LAYER "
- "set to 'generator', use 'reader' layer instead")
+ "set to 'generator', use 'reader' layer instead"
+ )
# Warn if feeds are generated with both SITEURL & FEED_DOMAIN undefined
feed_keys = [
- 'FEED_ATOM', 'FEED_RSS',
- 'FEED_ALL_ATOM', 'FEED_ALL_RSS',
- 'CATEGORY_FEED_ATOM', 'CATEGORY_FEED_RSS',
- 'AUTHOR_FEED_ATOM', 'AUTHOR_FEED_RSS',
- 'TAG_FEED_ATOM', 'TAG_FEED_RSS',
- 'TRANSLATION_FEED_ATOM', 'TRANSLATION_FEED_RSS',
+ "FEED_ATOM",
+ "FEED_RSS",
+ "FEED_ALL_ATOM",
+ "FEED_ALL_RSS",
+ "CATEGORY_FEED_ATOM",
+ "CATEGORY_FEED_RSS",
+ "AUTHOR_FEED_ATOM",
+ "AUTHOR_FEED_RSS",
+ "TAG_FEED_ATOM",
+ "TAG_FEED_RSS",
+ "TRANSLATION_FEED_ATOM",
+ "TRANSLATION_FEED_RSS",
]
if any(settings.get(k) for k in feed_keys):
- if not settings.get('SITEURL'):
- logger.warning('Feeds generated without SITEURL set properly may'
- ' not be valid')
+ if not settings.get("SITEURL"):
+ logger.warning(
+ "Feeds generated without SITEURL set properly may" " not be valid"
+ )
- if 'TIMEZONE' not in settings:
+ if "TIMEZONE" not in settings:
logger.warning(
- 'No timezone information specified in the settings. Assuming'
- ' your timezone is UTC for feed generation. Check '
- 'https://docs.getpelican.com/en/latest/settings.html#TIMEZONE '
- 'for more information')
+ "No timezone information specified in the settings. Assuming"
+ " your timezone is UTC for feed generation. Check "
+ "https://docs.getpelican.com/en/latest/settings.html#TIMEZONE "
+ "for more information"
+ )
# fix up pagination rules
from pelican.paginator import PaginationRule
+
pagination_rules = [
- PaginationRule(*r) for r in settings.get(
- 'PAGINATION_PATTERNS',
- DEFAULT_CONFIG['PAGINATION_PATTERNS'],
+ PaginationRule(*r)
+ for r in settings.get(
+ "PAGINATION_PATTERNS",
+ DEFAULT_CONFIG["PAGINATION_PATTERNS"],
)
]
- settings['PAGINATION_PATTERNS'] = sorted(
+ settings["PAGINATION_PATTERNS"] = sorted(
pagination_rules,
key=lambda r: r[0],
)
# Save people from accidentally setting a string rather than a list
path_keys = (
- 'ARTICLE_EXCLUDES',
- 'DEFAULT_METADATA',
- 'DIRECT_TEMPLATES',
- 'THEME_TEMPLATES_OVERRIDES',
- 'FILES_TO_COPY',
- 'IGNORE_FILES',
- 'PAGINATED_DIRECT_TEMPLATES',
- 'PLUGINS',
- 'STATIC_EXCLUDES',
- 'STATIC_PATHS',
- 'THEME_STATIC_PATHS',
- 'ARTICLE_PATHS',
- 'PAGE_PATHS',
+ "ARTICLE_EXCLUDES",
+ "DEFAULT_METADATA",
+ "DIRECT_TEMPLATES",
+ "THEME_TEMPLATES_OVERRIDES",
+ "FILES_TO_COPY",
+ "IGNORE_FILES",
+ "PAGINATED_DIRECT_TEMPLATES",
+ "PLUGINS",
+ "STATIC_EXCLUDES",
+ "STATIC_PATHS",
+ "THEME_STATIC_PATHS",
+ "ARTICLE_PATHS",
+ "PAGE_PATHS",
)
for PATH_KEY in filter(lambda k: k in settings, path_keys):
if isinstance(settings[PATH_KEY], str):
- logger.warning("Detected misconfiguration with %s setting "
- "(must be a list), falling back to the default",
- PATH_KEY)
+ logger.warning(
+ "Detected misconfiguration with %s setting "
+ "(must be a list), falling back to the default",
+ PATH_KEY,
+ )
settings[PATH_KEY] = DEFAULT_CONFIG[PATH_KEY]
# Add {PAGE,ARTICLE}_PATHS to {ARTICLE,PAGE}_EXCLUDES
- mutually_exclusive = ('ARTICLE', 'PAGE')
+ mutually_exclusive = ("ARTICLE", "PAGE")
for type_1, type_2 in [mutually_exclusive, mutually_exclusive[::-1]]:
try:
- includes = settings[type_1 + '_PATHS']
- excludes = settings[type_2 + '_EXCLUDES']
+ includes = settings[type_1 + "_PATHS"]
+ excludes = settings[type_2 + "_EXCLUDES"]
for path in includes:
if path not in excludes:
excludes.append(path)
except KeyError:
- continue # setting not specified, nothing to do
+ continue # setting not specified, nothing to do
return settings
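
As context for the PAGINATION_PATTERNS handling above: each configured tuple is wrapped in a PaginationRule and the rules are sorted by their minimum page number. A minimal sketch of that normalization, using a hypothetical namedtuple in place of pelican.paginator.PaginationRule and made-up patterns rather than the real defaults:

    from collections import namedtuple

    # Hypothetical stand-in for pelican.paginator.PaginationRule.
    PaginationRule = namedtuple("PaginationRule", "min_page URL SAVE_AS")

    # Illustrative patterns only; the real values come from settings/DEFAULT_CONFIG.
    raw_patterns = [
        (2, "{base_name}/page/{number}/", "{base_name}/page/{number}/index.html"),
        (1, "{base_name}/", "{base_name}/index.html"),
    ]

    rules = sorted(
        (PaginationRule(*r) for r in raw_patterns),
        key=lambda r: r[0],  # sort by min_page, as configure_settings does
    )
    print([r.min_page for r in rules])  # [1, 2]
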
diff --git a/pelican/signals.py b/pelican/signals.py
--- a/pelican/signals.py
+++ b/pelican/signals.py
@@ -1,4 +1,4 @@
raise ImportError(
- 'Importing from `pelican.signals` is deprecated. '
- 'Use `from pelican import signals` or `import pelican.plugins.signals` instead.'
+ "Importing from `pelican.signals` is deprecated. "
+ "Use `from pelican import signals` or `import pelican.plugins.signals` instead."
)
diff --git a/pelican/tools/pelican_import.py b/pelican/tools/pelican_import.py
--- a/pelican/tools/pelican_import.py
+++ b/pelican/tools/pelican_import.py
@@ -47,74 +47,69 @@ def decode_wp_content(content, br=True):
pre_index += 1
content = content + last_pre
- content = re.sub(r'<br />\s*<br />', "\n\n", content)
- allblocks = ('(?:table|thead|tfoot|caption|col|colgroup|tbody|tr|'
- 'td|th|div|dl|dd|dt|ul|ol|li|pre|select|option|form|'
- 'map|area|blockquote|address|math|style|p|h[1-6]|hr|'
- 'fieldset|noscript|samp|legend|section|article|aside|'
- 'hgroup|header|footer|nav|figure|figcaption|details|'
- 'menu|summary)')
- content = re.sub(r'(<' + allblocks + r'[^>]*>)', "\n\\1", content)
- content = re.sub(r'(</' + allblocks + r'>)', "\\1\n\n", content)
+ content = re.sub(r"<br />\s*<br />", "\n\n", content)
+ allblocks = (
+ "(?:table|thead|tfoot|caption|col|colgroup|tbody|tr|"
+ "td|th|div|dl|dd|dt|ul|ol|li|pre|select|option|form|"
+ "map|area|blockquote|address|math|style|p|h[1-6]|hr|"
+ "fieldset|noscript|samp|legend|section|article|aside|"
+ "hgroup|header|footer|nav|figure|figcaption|details|"
+ "menu|summary)"
+ )
+ content = re.sub(r"(<" + allblocks + r"[^>]*>)", "\n\\1", content)
+ content = re.sub(r"(</" + allblocks + r">)", "\\1\n\n", content)
# content = content.replace("\r\n", "\n")
if "<object" in content:
# no <p> inside object/embed
- content = re.sub(r'\s*<param([^>]*)>\s*', "<param\\1>", content)
- content = re.sub(r'\s*</embed>\s*', '</embed>', content)
+ content = re.sub(r"\s*<param([^>]*)>\s*", "<param\\1>", content)
+ content = re.sub(r"\s*</embed>\s*", "</embed>", content)
# content = re.sub(r'/\n\n+/', '\n\n', content)
- pgraphs = filter(lambda s: s != "", re.split(r'\n\s*\n', content))
+ pgraphs = filter(lambda s: s != "", re.split(r"\n\s*\n", content))
content = ""
for p in pgraphs:
content = content + "<p>" + p.strip() + "</p>\n"
# under certain strange conditions it could create
# a P of entirely whitespace
- content = re.sub(r'<p>\s*</p>', '', content)
- content = re.sub(
- r'<p>([^<]+)</(div|address|form)>',
- "<p>\\1</p></\\2>",
- content)
+ content = re.sub(r"<p>\s*</p>", "", content)
+ content = re.sub(r"<p>([^<]+)</(div|address|form)>", "<p>\\1</p></\\2>", content)
# don't wrap tags
- content = re.sub(
- r'<p>\s*(</?' + allblocks + r'[^>]*>)\s*</p>',
- "\\1",
- content)
+ content = re.sub(r"<p>\s*(</?" + allblocks + r"[^>]*>)\s*</p>", "\\1", content)
# problem with nested lists
- content = re.sub(r'<p>(<li.*)</p>', "\\1", content)
- content = re.sub(r'<p><blockquote([^>]*)>', "<blockquote\\1><p>", content)
- content = content.replace('</blockquote></p>', '</p></blockquote>')
- content = re.sub(r'<p>\s*(</?' + allblocks + '[^>]*>)', "\\1", content)
- content = re.sub(r'(</?' + allblocks + r'[^>]*>)\s*</p>', "\\1", content)
+ content = re.sub(r"<p>(<li.*)</p>", "\\1", content)
+ content = re.sub(r"<p><blockquote([^>]*)>", "<blockquote\\1><p>", content)
+ content = content.replace("</blockquote></p>", "</p></blockquote>")
+ content = re.sub(r"<p>\s*(</?" + allblocks + "[^>]*>)", "\\1", content)
+ content = re.sub(r"(</?" + allblocks + r"[^>]*>)\s*</p>", "\\1", content)
if br:
+
def _preserve_newline(match):
return match.group(0).replace("\n", "<WPPreserveNewline />")
- content = re.sub(
- r'/<(script|style).*?<\/\\1>/s',
- _preserve_newline,
- content)
+
+ content = re.sub(r"/<(script|style).*?<\/\\1>/s", _preserve_newline, content)
# optionally make line breaks
- content = re.sub(r'(?<!<br />)\s*\n', "<br />\n", content)
+ content = re.sub(r"(?<!<br />)\s*\n", "<br />\n", content)
content = content.replace("<WPPreserveNewline />", "\n")
+ content = re.sub(r"(</?" + allblocks + r"[^>]*>)\s*<br />", "\\1", content)
content = re.sub(
- r'(</?' + allblocks + r'[^>]*>)\s*<br />', "\\1",
- content)
- content = re.sub(
- r'<br />(\s*</?(?:p|li|div|dl|dd|dt|th|pre|td|ul|ol)[^>]*>)',
- '\\1',
- content)
- content = re.sub(r'\n</p>', "</p>", content)
+ r"<br />(\s*</?(?:p|li|div|dl|dd|dt|th|pre|td|ul|ol)[^>]*>)", "\\1", content
+ )
+ content = re.sub(r"\n</p>", "</p>", content)
if pre_tags:
+
def _multi_replace(dic, string):
- pattern = r'|'.join(map(re.escape, dic.keys()))
+ pattern = r"|".join(map(re.escape, dic.keys()))
return re.sub(pattern, lambda m: dic[m.group()], string)
+
content = _multi_replace(pre_tags, content)
# convert [caption] tags into <figure>
content = re.sub(
- r'\[caption(?:.*?)(?:caption=\"(.*?)\")?\]'
- r'((?:\<a(?:.*?)\>)?(?:\<img.*?\>)(?:\<\/a\>)?)\s?(.*?)\[\/caption\]',
- r'<figure>\n\2\n<figcaption>\1\3</figcaption>\n</figure>',
- content)
+ r"\[caption(?:.*?)(?:caption=\"(.*?)\")?\]"
+ r"((?:\<a(?:.*?)\>)?(?:\<img.*?\>)(?:\<\/a\>)?)\s?(.*?)\[\/caption\]",
+ r"<figure>\n\2\n<figcaption>\1\3</figcaption>\n</figure>",
+ content,
+ )
return content
@@ -124,10 +119,12 @@ def xml_to_soup(xml):
try:
from bs4 import BeautifulSoup
except ImportError:
- error = ('Missing dependency "BeautifulSoup4" and "lxml" required to '
- 'import XML files.')
+ error = (
+ 'Missing dependency "BeautifulSoup4" and "lxml" required to '
+ "import XML files."
+ )
sys.exit(error)
- with open(xml, encoding='utf-8') as infile:
+ with open(xml, encoding="utf-8") as infile:
xmlfile = infile.read()
soup = BeautifulSoup(xmlfile, "xml")
return soup
@@ -144,111 +141,125 @@ def wp2fields(xml, wp_custpost=False):
"""Opens a wordpress XML file, and yield Pelican fields"""
soup = xml_to_soup(xml)
- items = soup.rss.channel.findAll('item')
+ items = soup.rss.channel.findAll("item")
for item in items:
-
- if item.find('status').string in ["publish", "draft"]:
-
+ if item.find("status").string in ["publish", "draft"]:
try:
# Use HTMLParser due to issues with BeautifulSoup 3
title = unescape(item.title.contents[0])
except IndexError:
- title = 'No title [%s]' % item.find('post_name').string
+ title = "No title [%s]" % item.find("post_name").string
logger.warning('Post "%s" is lacking a proper title', title)
- post_name = item.find('post_name').string
- post_id = item.find('post_id').string
+ post_name = item.find("post_name").string
+ post_id = item.find("post_id").string
filename = get_filename(post_name, post_id)
- content = item.find('encoded').string
- raw_date = item.find('post_date').string
- if raw_date == '0000-00-00 00:00:00':
+ content = item.find("encoded").string
+ raw_date = item.find("post_date").string
+ if raw_date == "0000-00-00 00:00:00":
date = None
else:
- date_object = SafeDatetime.strptime(
- raw_date, '%Y-%m-%d %H:%M:%S')
- date = date_object.strftime('%Y-%m-%d %H:%M')
- author = item.find('creator').string
+ date_object = SafeDatetime.strptime(raw_date, "%Y-%m-%d %H:%M:%S")
+ date = date_object.strftime("%Y-%m-%d %H:%M")
+ author = item.find("creator").string
- categories = [cat.string for cat
- in item.findAll('category', {'domain': 'category'})]
+ categories = [
+ cat.string for cat in item.findAll("category", {"domain": "category"})
+ ]
- tags = [tag.string for tag
- in item.findAll('category', {'domain': 'post_tag'})]
+ tags = [
+ tag.string for tag in item.findAll("category", {"domain": "post_tag"})
+ ]
# To publish a post the status should be 'published'
- status = 'published' if item.find('status').string == "publish" \
- else item.find('status').string
-
- kind = 'article'
- post_type = item.find('post_type').string
- if post_type == 'page':
- kind = 'page'
+ status = (
+ "published"
+ if item.find("status").string == "publish"
+ else item.find("status").string
+ )
+
+ kind = "article"
+ post_type = item.find("post_type").string
+ if post_type == "page":
+ kind = "page"
elif wp_custpost:
- if post_type == 'post':
+ if post_type == "post":
pass
# Old behaviour was to name everything not a page as an
# article.Theoretically all attachments have status == inherit
# so no attachments should be here. But this statement is to
# maintain existing behaviour in case that doesn't hold true.
- elif post_type == 'attachment':
+ elif post_type == "attachment":
pass
else:
kind = post_type
- yield (title, content, filename, date, author, categories,
- tags, status, kind, 'wp-html')
+ yield (
+ title,
+ content,
+ filename,
+ date,
+ author,
+ categories,
+ tags,
+ status,
+ kind,
+ "wp-html",
+ )
def blogger2fields(xml):
"""Opens a blogger XML file, and yield Pelican fields"""
soup = xml_to_soup(xml)
- entries = soup.feed.findAll('entry')
+ entries = soup.feed.findAll("entry")
for entry in entries:
raw_kind = entry.find(
- 'category', {'scheme': 'http://schemas.google.com/g/2005#kind'}
- ).get('term')
- if raw_kind == 'http://schemas.google.com/blogger/2008/kind#post':
- kind = 'article'
- elif raw_kind == 'http://schemas.google.com/blogger/2008/kind#comment':
- kind = 'comment'
- elif raw_kind == 'http://schemas.google.com/blogger/2008/kind#page':
- kind = 'page'
+ "category", {"scheme": "http://schemas.google.com/g/2005#kind"}
+ ).get("term")
+ if raw_kind == "http://schemas.google.com/blogger/2008/kind#post":
+ kind = "article"
+ elif raw_kind == "http://schemas.google.com/blogger/2008/kind#comment":
+ kind = "comment"
+ elif raw_kind == "http://schemas.google.com/blogger/2008/kind#page":
+ kind = "page"
else:
continue
try:
- assert kind != 'comment'
- filename = entry.find('link', {'rel': 'alternate'})['href']
+ assert kind != "comment"
+ filename = entry.find("link", {"rel": "alternate"})["href"]
filename = os.path.splitext(os.path.basename(filename))[0]
except (AssertionError, TypeError, KeyError):
- filename = entry.find('id').string.split('.')[-1]
+ filename = entry.find("id").string.split(".")[-1]
- title = entry.find('title').string or ''
+ title = entry.find("title").string or ""
- content = entry.find('content').string
- raw_date = entry.find('published').string
- if hasattr(SafeDatetime, 'fromisoformat'):
+ content = entry.find("content").string
+ raw_date = entry.find("published").string
+ if hasattr(SafeDatetime, "fromisoformat"):
date_object = SafeDatetime.fromisoformat(raw_date)
else:
- date_object = SafeDatetime.strptime(
- raw_date[:23], '%Y-%m-%dT%H:%M:%S.%f')
- date = date_object.strftime('%Y-%m-%d %H:%M')
- author = entry.find('author').find('name').string
+ date_object = SafeDatetime.strptime(raw_date[:23], "%Y-%m-%dT%H:%M:%S.%f")
+ date = date_object.strftime("%Y-%m-%d %H:%M")
+ author = entry.find("author").find("name").string
# blogger posts only have tags, no category
- tags = [tag.get('term') for tag in entry.findAll(
- 'category', {'scheme': 'http://www.blogger.com/atom/ns#'})]
+ tags = [
+ tag.get("term")
+ for tag in entry.findAll(
+ "category", {"scheme": "http://www.blogger.com/atom/ns#"}
+ )
+ ]
# Drafts have <app:control><app:draft>yes</app:draft></app:control>
- status = 'published'
+ status = "published"
try:
- if entry.find('control').find('draft').string == 'yes':
- status = 'draft'
+ if entry.find("control").find("draft").string == "yes":
+ status = "draft"
except AttributeError:
pass
- yield (title, content, filename, date, author, None, tags, status,
- kind, 'html')
+ yield (title, content, filename, date, author, None, tags, status, kind, "html")
def dc2fields(file):
@@ -256,9 +267,11 @@ def dc2fields(file):
try:
from bs4 import BeautifulSoup
except ImportError:
- error = ('Missing dependency '
- '"BeautifulSoup4" and "lxml" required '
- 'to import Dotclear files.')
+ error = (
+ "Missing dependency "
+ '"BeautifulSoup4" and "lxml" required '
+ "to import Dotclear files."
+ )
sys.exit(error)
in_cat = False
@@ -266,15 +279,14 @@ def dc2fields(file):
category_list = {}
posts = []
- with open(file, encoding='utf-8') as f:
-
+ with open(file, encoding="utf-8") as f:
for line in f:
# remove final \n
line = line[:-1]
- if line.startswith('[category'):
+ if line.startswith("[category"):
in_cat = True
- elif line.startswith('[post'):
+ elif line.startswith("[post"):
in_post = True
elif in_cat:
fields = line.split('","')
@@ -294,7 +306,7 @@ def dc2fields(file):
print("%i posts read." % len(posts))
- subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+ subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
for post in posts:
fields = post.split('","')
@@ -329,44 +341,39 @@ def dc2fields(file):
# redirect_url = fields[28][:-1]
# remove seconds
- post_creadt = ':'.join(post_creadt.split(':')[0:2])
+ post_creadt = ":".join(post_creadt.split(":")[0:2])
- author = ''
+ author = ""
categories = []
tags = []
if cat_id:
- categories = [category_list[id].strip() for id
- in cat_id.split(',')]
+ categories = [category_list[id].strip() for id in cat_id.split(",")]
# Get tags related to a post
- tag = (post_meta.replace('{', '')
- .replace('}', '')
- .replace('a:1:s:3:\\"tag\\";a:', '')
- .replace('a:0:', ''))
+ tag = (
+ post_meta.replace("{", "")
+ .replace("}", "")
+ .replace('a:1:s:3:\\"tag\\";a:', "")
+ .replace("a:0:", "")
+ )
if len(tag) > 1:
if int(len(tag[:1])) == 1:
newtag = tag.split('"')[1]
tags.append(
- BeautifulSoup(
- newtag,
- 'xml'
- )
+ BeautifulSoup(newtag, "xml")
# bs4 always outputs UTF-8
- .decode('utf-8')
+ .decode("utf-8")
)
else:
i = 1
j = 1
- while (i <= int(tag[:1])):
- newtag = tag.split('"')[j].replace('\\', '')
+ while i <= int(tag[:1]):
+ newtag = tag.split('"')[j].replace("\\", "")
tags.append(
- BeautifulSoup(
- newtag,
- 'xml'
- )
+ BeautifulSoup(newtag, "xml")
# bs4 always outputs UTF-8
- .decode('utf-8')
+ .decode("utf-8")
)
i = i + 1
if j < int(tag[:1]) * 2:
@@ -381,116 +388,149 @@ def dc2fields(file):
content = post_excerpt + post_content
else:
content = post_excerpt_xhtml + post_content_xhtml
- content = content.replace('\\n', '')
+ content = content.replace("\\n", "")
post_format = "html"
- kind = 'article' # TODO: Recognise pages
- status = 'published' # TODO: Find a way for draft posts
+ kind = "article" # TODO: Recognise pages
+ status = "published" # TODO: Find a way for draft posts
- yield (post_title, content, slugify(post_title, regex_subs=subs),
- post_creadt, author, categories, tags, status, kind,
- post_format)
+ yield (
+ post_title,
+ content,
+ slugify(post_title, regex_subs=subs),
+ post_creadt,
+ author,
+ categories,
+ tags,
+ status,
+ kind,
+ post_format,
+ )
def _get_tumblr_posts(api_key, blogname, offset=0):
import json
import urllib.request as urllib_request
- url = ("https://api.tumblr.com/v2/blog/%s.tumblr.com/"
- "posts?api_key=%s&offset=%d&filter=raw") % (
- blogname, api_key, offset)
+
+ url = (
+ "https://api.tumblr.com/v2/blog/%s.tumblr.com/"
+ "posts?api_key=%s&offset=%d&filter=raw"
+ ) % (blogname, api_key, offset)
request = urllib_request.Request(url)
handle = urllib_request.urlopen(request)
- posts = json.loads(handle.read().decode('utf-8'))
- return posts.get('response').get('posts')
+ posts = json.loads(handle.read().decode("utf-8"))
+ return posts.get("response").get("posts")
def tumblr2fields(api_key, blogname):
- """ Imports Tumblr posts (API v2)"""
+ """Imports Tumblr posts (API v2)"""
offset = 0
posts = _get_tumblr_posts(api_key, blogname, offset)
- subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+ subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
while len(posts) > 0:
for post in posts:
- title = \
- post.get('title') or \
- post.get('source_title') or \
- post.get('type').capitalize()
- slug = post.get('slug') or slugify(title, regex_subs=subs)
- tags = post.get('tags')
- timestamp = post.get('timestamp')
+ title = (
+ post.get("title")
+ or post.get("source_title")
+ or post.get("type").capitalize()
+ )
+ slug = post.get("slug") or slugify(title, regex_subs=subs)
+ tags = post.get("tags")
+ timestamp = post.get("timestamp")
date = SafeDatetime.fromtimestamp(
int(timestamp), tz=datetime.timezone.utc
).strftime("%Y-%m-%d %H:%M:%S%z")
- slug = SafeDatetime.fromtimestamp(
- int(timestamp), tz=datetime.timezone.utc
- ).strftime("%Y-%m-%d-") + slug
- format = post.get('format')
- content = post.get('body')
- type = post.get('type')
- if type == 'photo':
- if format == 'markdown':
- fmtstr = '![%s](%s)'
+ slug = (
+ SafeDatetime.fromtimestamp(
+ int(timestamp), tz=datetime.timezone.utc
+ ).strftime("%Y-%m-%d-")
+ + slug
+ )
+ format = post.get("format")
+ content = post.get("body")
+ type = post.get("type")
+ if type == "photo":
+ if format == "markdown":
+ fmtstr = "![%s](%s)"
else:
fmtstr = '<img alt="%s" src="%s" />'
- content = '\n'.join(
- fmtstr % (photo.get('caption'),
- photo.get('original_size').get('url'))
- for photo in post.get('photos'))
- elif type == 'quote':
- if format == 'markdown':
- fmtstr = '\n\n— %s'
+ content = "\n".join(
+ fmtstr
+ % (photo.get("caption"), photo.get("original_size").get("url"))
+ for photo in post.get("photos")
+ )
+ elif type == "quote":
+ if format == "markdown":
+ fmtstr = "\n\n— %s"
else:
- fmtstr = '<p>— %s</p>'
- content = post.get('text') + fmtstr % post.get('source')
- elif type == 'link':
- if format == 'markdown':
- fmtstr = '[via](%s)\n\n'
+ fmtstr = "<p>— %s</p>"
+ content = post.get("text") + fmtstr % post.get("source")
+ elif type == "link":
+ if format == "markdown":
+ fmtstr = "[via](%s)\n\n"
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
- content = fmtstr % post.get('url') + post.get('description')
- elif type == 'audio':
- if format == 'markdown':
- fmtstr = '[via](%s)\n\n'
+ content = fmtstr % post.get("url") + post.get("description")
+ elif type == "audio":
+ if format == "markdown":
+ fmtstr = "[via](%s)\n\n"
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
- content = fmtstr % post.get('source_url') + \
- post.get('caption') + \
- post.get('player')
- elif type == 'video':
- if format == 'markdown':
- fmtstr = '[via](%s)\n\n'
+ content = (
+ fmtstr % post.get("source_url")
+ + post.get("caption")
+ + post.get("player")
+ )
+ elif type == "video":
+ if format == "markdown":
+ fmtstr = "[via](%s)\n\n"
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
- source = fmtstr % post.get('source_url')
- caption = post.get('caption')
+ source = fmtstr % post.get("source_url")
+ caption = post.get("caption")
players = [
# If embed_code is False, couldn't get the video
- player.get('embed_code') or None
- for player in post.get('player')]
+ player.get("embed_code") or None
+ for player in post.get("player")
+ ]
# If there are no embeddable players, say so, once
- if len(players) > 0 and all(
- player is None for player in players):
+ if len(players) > 0 and all(player is None for player in players):
players = "<p>(This video isn't available anymore.)</p>\n"
else:
- players = '\n'.join(players)
+ players = "\n".join(players)
content = source + caption + players
- elif type == 'answer':
- title = post.get('question')
- content = ('<p>'
- '<a href="%s" rel="external nofollow">%s</a>'
- ': %s'
- '</p>\n'
- ' %s' % (post.get('asking_name'),
- post.get('asking_url'),
- post.get('question'),
- post.get('answer')))
-
- content = content.rstrip() + '\n'
- kind = 'article'
- status = 'published' # TODO: Find a way for draft posts
-
- yield (title, content, slug, date, post.get('blog_name'), [type],
- tags, status, kind, format)
+ elif type == "answer":
+ title = post.get("question")
+ content = (
+ "<p>"
+ '<a href="%s" rel="external nofollow">%s</a>'
+ ": %s"
+ "</p>\n"
+ " %s"
+ % (
+ post.get("asking_name"),
+ post.get("asking_url"),
+ post.get("question"),
+ post.get("answer"),
+ )
+ )
+
+ content = content.rstrip() + "\n"
+ kind = "article"
+ status = "published" # TODO: Find a way for draft posts
+
+ yield (
+ title,
+ content,
+ slug,
+ date,
+ post.get("blog_name"),
+ [type],
+ tags,
+ status,
+ kind,
+ format,
+ )
offset += len(posts)
posts = _get_tumblr_posts(api_key, blogname, offset)
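
The offset loop just above drains the Tumblr API one page at a time until an empty page comes back. A minimal sketch of that pagination pattern in isolation, where _fake_posts, fetch_page and iter_posts are illustrative stand-ins (not part of the patch) for the real API call in _get_tumblr_posts:

    _fake_posts = ["post-1", "post-2", "post-3"]  # stub for the remote blog

    def fetch_page(offset, page_size=2):
        # Hypothetical stand-in for _get_tumblr_posts(api_key, blogname, offset).
        return _fake_posts[offset:offset + page_size]

    def iter_posts():
        offset = 0
        posts = fetch_page(offset)
        while len(posts) > 0:          # stop once a fetch returns an empty page
            yield from posts
            offset += len(posts)       # advance by however many posts came back
            posts = fetch_page(offset)

    print(list(iter_posts()))  # ['post-1', 'post-2', 'post-3']
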
@@ -499,145 +539,167 @@ def tumblr2fields(api_key, blogname):
def feed2fields(file):
"""Read a feed and yield pelican fields"""
import feedparser
+
d = feedparser.parse(file)
- subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+ subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
for entry in d.entries:
- date = (time.strftime('%Y-%m-%d %H:%M', entry.updated_parsed)
- if hasattr(entry, 'updated_parsed') else None)
- author = entry.author if hasattr(entry, 'author') else None
- tags = ([e['term'] for e in entry.tags]
- if hasattr(entry, 'tags') else None)
+ date = (
+ time.strftime("%Y-%m-%d %H:%M", entry.updated_parsed)
+ if hasattr(entry, "updated_parsed")
+ else None
+ )
+ author = entry.author if hasattr(entry, "author") else None
+ tags = [e["term"] for e in entry.tags] if hasattr(entry, "tags") else None
slug = slugify(entry.title, regex_subs=subs)
- kind = 'article'
- yield (entry.title, entry.description, slug, date,
- author, [], tags, None, kind, 'html')
-
-
-def build_header(title, date, author, categories, tags, slug,
- status=None, attachments=None):
+ kind = "article"
+ yield (
+ entry.title,
+ entry.description,
+ slug,
+ date,
+ author,
+ [],
+ tags,
+ None,
+ kind,
+ "html",
+ )
+
+
+def build_header(
+ title, date, author, categories, tags, slug, status=None, attachments=None
+):
"""Build a header from a list of fields"""
from docutils.utils import column_width
- header = '{}\n{}\n'.format(title, '#' * column_width(title))
+ header = "{}\n{}\n".format(title, "#" * column_width(title))
if date:
- header += ':date: %s\n' % date
+ header += ":date: %s\n" % date
if author:
- header += ':author: %s\n' % author
+ header += ":author: %s\n" % author
if categories:
- header += ':category: %s\n' % ', '.join(categories)
+ header += ":category: %s\n" % ", ".join(categories)
if tags:
- header += ':tags: %s\n' % ', '.join(tags)
+ header += ":tags: %s\n" % ", ".join(tags)
if slug:
- header += ':slug: %s\n' % slug
+ header += ":slug: %s\n" % slug
if status:
- header += ':status: %s\n' % status
+ header += ":status: %s\n" % status
if attachments:
- header += ':attachments: %s\n' % ', '.join(attachments)
- header += '\n'
+ header += ":attachments: %s\n" % ", ".join(attachments)
+ header += "\n"
return header
-def build_asciidoc_header(title, date, author, categories, tags, slug,
- status=None, attachments=None):
+def build_asciidoc_header(
+ title, date, author, categories, tags, slug, status=None, attachments=None
+):
"""Build a header from a list of fields"""
- header = '= %s\n' % title
+ header = "= %s\n" % title
if author:
- header += '%s\n' % author
+ header += "%s\n" % author
if date:
- header += '%s\n' % date
+ header += "%s\n" % date
if categories:
- header += ':category: %s\n' % ', '.join(categories)
+ header += ":category: %s\n" % ", ".join(categories)
if tags:
- header += ':tags: %s\n' % ', '.join(tags)
+ header += ":tags: %s\n" % ", ".join(tags)
if slug:
- header += ':slug: %s\n' % slug
+ header += ":slug: %s\n" % slug
if status:
- header += ':status: %s\n' % status
+ header += ":status: %s\n" % status
if attachments:
- header += ':attachments: %s\n' % ', '.join(attachments)
- header += '\n'
+ header += ":attachments: %s\n" % ", ".join(attachments)
+ header += "\n"
return header
-def build_markdown_header(title, date, author, categories, tags,
- slug, status=None, attachments=None):
+def build_markdown_header(
+ title, date, author, categories, tags, slug, status=None, attachments=None
+):
"""Build a header from a list of fields"""
- header = 'Title: %s\n' % title
+ header = "Title: %s\n" % title
if date:
- header += 'Date: %s\n' % date
+ header += "Date: %s\n" % date
if author:
- header += 'Author: %s\n' % author
+ header += "Author: %s\n" % author
if categories:
- header += 'Category: %s\n' % ', '.join(categories)
+ header += "Category: %s\n" % ", ".join(categories)
if tags:
- header += 'Tags: %s\n' % ', '.join(tags)
+ header += "Tags: %s\n" % ", ".join(tags)
if slug:
- header += 'Slug: %s\n' % slug
+ header += "Slug: %s\n" % slug
if status:
- header += 'Status: %s\n' % status
+ header += "Status: %s\n" % status
if attachments:
- header += 'Attachments: %s\n' % ', '.join(attachments)
- header += '\n'
+ header += "Attachments: %s\n" % ", ".join(attachments)
+ header += "\n"
return header
-def get_ext(out_markup, in_markup='html'):
- if out_markup == 'asciidoc':
- ext = '.adoc'
- elif in_markup == 'markdown' or out_markup == 'markdown':
- ext = '.md'
+def get_ext(out_markup, in_markup="html"):
+ if out_markup == "asciidoc":
+ ext = ".adoc"
+ elif in_markup == "markdown" or out_markup == "markdown":
+ ext = ".md"
else:
- ext = '.rst'
+ ext = ".rst"
return ext
-def get_out_filename(output_path, filename, ext, kind,
- dirpage, dircat, categories, wp_custpost, slug_subs):
+def get_out_filename(
+ output_path,
+ filename,
+ ext,
+ kind,
+ dirpage,
+ dircat,
+ categories,
+ wp_custpost,
+ slug_subs,
+):
filename = os.path.basename(filename)
# Enforce filename restrictions for various filesystems at once; see
# https://en.wikipedia.org/wiki/Filename#Reserved_characters_and_words
# we do not need to filter words because an extension will be appended
- filename = re.sub(r'[<>:"/\\|?*^% ]', '-', filename) # invalid chars
- filename = filename.lstrip('.') # should not start with a dot
+ filename = re.sub(r'[<>:"/\\|?*^% ]', "-", filename) # invalid chars
+ filename = filename.lstrip(".") # should not start with a dot
if not filename:
- filename = '_'
+ filename = "_"
filename = filename[:249] # allow for 5 extra characters
out_filename = os.path.join(output_path, filename + ext)
# option to put page posts in pages/ subdirectory
- if dirpage and kind == 'page':
- pages_dir = os.path.join(output_path, 'pages')
+ if dirpage and kind == "page":
+ pages_dir = os.path.join(output_path, "pages")
if not os.path.isdir(pages_dir):
os.mkdir(pages_dir)
out_filename = os.path.join(pages_dir, filename + ext)
- elif not dirpage and kind == 'page':
+ elif not dirpage and kind == "page":
pass
# option to put wp custom post types in directories with post type
# names. Custom post types can also have categories so option to
# create subdirectories with category names
- elif kind != 'article':
+ elif kind != "article":
if wp_custpost:
typename = slugify(kind, regex_subs=slug_subs)
else:
- typename = ''
- kind = 'article'
+ typename = ""
+ kind = "article"
if dircat and (len(categories) > 0):
- catname = slugify(
- categories[0], regex_subs=slug_subs, preserve_case=True)
+ catname = slugify(categories[0], regex_subs=slug_subs, preserve_case=True)
else:
- catname = ''
- out_filename = os.path.join(output_path, typename,
- catname, filename + ext)
+ catname = ""
+ out_filename = os.path.join(output_path, typename, catname, filename + ext)
if not os.path.isdir(os.path.join(output_path, typename, catname)):
os.makedirs(os.path.join(output_path, typename, catname))
# option to put files in directories with categories names
elif dircat and (len(categories) > 0):
- catname = slugify(
- categories[0], regex_subs=slug_subs, preserve_case=True)
+ catname = slugify(categories[0], regex_subs=slug_subs, preserve_case=True)
out_filename = os.path.join(output_path, catname, filename + ext)
if not os.path.isdir(os.path.join(output_path, catname)):
os.mkdir(os.path.join(output_path, catname))
@@ -650,18 +712,19 @@ def get_attachments(xml):
of the attachment_urls
"""
soup = xml_to_soup(xml)
- items = soup.rss.channel.findAll('item')
+ items = soup.rss.channel.findAll("item")
names = {}
attachments = []
for item in items:
- kind = item.find('post_type').string
- post_name = item.find('post_name').string
- post_id = item.find('post_id').string
-
- if kind == 'attachment':
- attachments.append((item.find('post_parent').string,
- item.find('attachment_url').string))
+ kind = item.find("post_type").string
+ post_name = item.find("post_name").string
+ post_id = item.find("post_id").string
+
+ if kind == "attachment":
+ attachments.append(
+ (item.find("post_parent").string, item.find("attachment_url").string)
+ )
else:
filename = get_filename(post_name, post_id)
names[post_id] = filename
@@ -686,23 +749,23 @@ def download_attachments(output_path, urls):
path = urlparse(url).path
# teardown path and rebuild to negate any errors with
# os.path.join and leading /'s
- path = path.split('/')
+ path = path.split("/")
filename = path.pop(-1)
- localpath = ''
+ localpath = ""
for item in path:
- if sys.platform != 'win32' or ':' not in item:
+ if sys.platform != "win32" or ":" not in item:
localpath = os.path.join(localpath, item)
full_path = os.path.join(output_path, localpath)
# Generate percent-encoded URL
scheme, netloc, path, query, fragment = urlsplit(url)
- if scheme != 'file':
+ if scheme != "file":
path = quote(path)
url = urlunsplit((scheme, netloc, path, query, fragment))
if not os.path.exists(full_path):
os.makedirs(full_path)
- print('downloading {}'.format(filename))
+ print("downloading {}".format(filename))
try:
urlretrieve(url, os.path.join(full_path, filename))
locations[url] = os.path.join(localpath, filename)
@@ -713,43 +776,61 @@ def download_attachments(output_path, urls):
def is_pandoc_needed(in_markup):
- return in_markup in ('html', 'wp-html')
+ return in_markup in ("html", "wp-html")
def get_pandoc_version():
- cmd = ['pandoc', '--version']
+ cmd = ["pandoc", "--version"]
try:
output = subprocess.check_output(cmd, universal_newlines=True)
except (subprocess.CalledProcessError, OSError) as e:
logger.warning("Pandoc version unknown: %s", e)
return ()
- return tuple(int(i) for i in output.split()[1].split('.'))
+ return tuple(int(i) for i in output.split()[1].split("."))
def update_links_to_attached_files(content, attachments):
for old_url, new_path in attachments.items():
# url may occur both with http:// and https://
- http_url = old_url.replace('https://', 'http://')
- https_url = old_url.replace('http://', 'https://')
+ http_url = old_url.replace("https://", "http://")
+ https_url = old_url.replace("http://", "https://")
for url in [http_url, https_url]:
- content = content.replace(url, '{static}' + new_path)
+ content = content.replace(url, "{static}" + new_path)
return content
def fields2pelican(
- fields, out_markup, output_path,
- dircat=False, strip_raw=False, disable_slugs=False,
- dirpage=False, filename_template=None, filter_author=None,
- wp_custpost=False, wp_attach=False, attachments=None):
-
+ fields,
+ out_markup,
+ output_path,
+ dircat=False,
+ strip_raw=False,
+ disable_slugs=False,
+ dirpage=False,
+ filename_template=None,
+ filter_author=None,
+ wp_custpost=False,
+ wp_attach=False,
+ attachments=None,
+):
pandoc_version = get_pandoc_version()
posts_require_pandoc = []
- slug_subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
-
- for (title, content, filename, date, author, categories, tags, status,
- kind, in_markup) in fields:
+ slug_subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
+
+ for (
+ title,
+ content,
+ filename,
+ date,
+ author,
+ categories,
+ tags,
+ status,
+ kind,
+ in_markup,
+ ) in fields:
if filter_author and filter_author != author:
continue
if is_pandoc_needed(in_markup) and not pandoc_version:
@@ -767,85 +848,120 @@ def fields2pelican(
links = None
ext = get_ext(out_markup, in_markup)
- if ext == '.adoc':
- header = build_asciidoc_header(title, date, author, categories,
- tags, slug, status, attachments)
- elif ext == '.md':
+ if ext == ".adoc":
+ header = build_asciidoc_header(
+ title, date, author, categories, tags, slug, status, attachments
+ )
+ elif ext == ".md":
header = build_markdown_header(
- title, date, author, categories, tags, slug,
- status, links.values() if links else None)
+ title,
+ date,
+ author,
+ categories,
+ tags,
+ slug,
+ status,
+ links.values() if links else None,
+ )
else:
- out_markup = 'rst'
- header = build_header(title, date, author, categories,
- tags, slug, status, links.values()
- if links else None)
+ out_markup = "rst"
+ header = build_header(
+ title,
+ date,
+ author,
+ categories,
+ tags,
+ slug,
+ status,
+ links.values() if links else None,
+ )
out_filename = get_out_filename(
- output_path, filename, ext, kind, dirpage, dircat,
- categories, wp_custpost, slug_subs)
+ output_path,
+ filename,
+ ext,
+ kind,
+ dirpage,
+ dircat,
+ categories,
+ wp_custpost,
+ slug_subs,
+ )
print(out_filename)
- if in_markup in ('html', 'wp-html'):
+ if in_markup in ("html", "wp-html"):
with tempfile.TemporaryDirectory() as tmpdir:
- html_filename = os.path.join(tmpdir, 'pandoc-input.html')
+ html_filename = os.path.join(tmpdir, "pandoc-input.html")
# Replace newlines with paragraphs wrapped with <p> so
# HTML is valid before conversion
- if in_markup == 'wp-html':
+ if in_markup == "wp-html":
new_content = decode_wp_content(content)
else:
paragraphs = content.splitlines()
- paragraphs = ['<p>{}</p>'.format(p) for p in paragraphs]
- new_content = ''.join(paragraphs)
- with open(html_filename, 'w', encoding='utf-8') as fp:
+ paragraphs = ["<p>{}</p>".format(p) for p in paragraphs]
+ new_content = "".join(paragraphs)
+ with open(html_filename, "w", encoding="utf-8") as fp:
fp.write(new_content)
if pandoc_version < (2,):
- parse_raw = '--parse-raw' if not strip_raw else ''
- wrap_none = '--wrap=none' \
- if pandoc_version >= (1, 16) else '--no-wrap'
- cmd = ('pandoc --normalize {0} --from=html'
- ' --to={1} {2} -o "{3}" "{4}"')
- cmd = cmd.format(parse_raw,
- out_markup if out_markup != 'markdown' else "gfm",
- wrap_none,
- out_filename, html_filename)
+ parse_raw = "--parse-raw" if not strip_raw else ""
+ wrap_none = (
+ "--wrap=none" if pandoc_version >= (1, 16) else "--no-wrap"
+ )
+ cmd = (
+ "pandoc --normalize {0} --from=html"
+ ' --to={1} {2} -o "{3}" "{4}"'
+ )
+ cmd = cmd.format(
+ parse_raw,
+ out_markup if out_markup != "markdown" else "gfm",
+ wrap_none,
+ out_filename,
+ html_filename,
+ )
else:
- from_arg = '-f html+raw_html' if not strip_raw else '-f html'
- cmd = ('pandoc {0} --to={1}-smart --wrap=none -o "{2}" "{3}"')
- cmd = cmd.format(from_arg,
- out_markup if out_markup != 'markdown' else "gfm",
- out_filename, html_filename)
+ from_arg = "-f html+raw_html" if not strip_raw else "-f html"
+ cmd = 'pandoc {0} --to={1}-smart --wrap=none -o "{2}" "{3}"'
+ cmd = cmd.format(
+ from_arg,
+ out_markup if out_markup != "markdown" else "gfm",
+ out_filename,
+ html_filename,
+ )
try:
rc = subprocess.call(cmd, shell=True)
if rc < 0:
- error = 'Child was terminated by signal %d' % -rc
+ error = "Child was terminated by signal %d" % -rc
exit(error)
elif rc > 0:
- error = 'Please, check your Pandoc installation.'
+ error = "Please, check your Pandoc installation."
exit(error)
except OSError as e:
- error = 'Pandoc execution failed: %s' % e
+ error = "Pandoc execution failed: %s" % e
exit(error)
- with open(out_filename, encoding='utf-8') as fs:
+ with open(out_filename, encoding="utf-8") as fs:
content = fs.read()
- if out_markup == 'markdown':
+ if out_markup == "markdown":
# In markdown, to insert a <br />, end a line with two
# or more spaces & then a end-of-line
- content = content.replace('\\\n ', ' \n')
- content = content.replace('\\\n', ' \n')
+ content = content.replace("\\\n ", " \n")
+ content = content.replace("\\\n", " \n")
if wp_attach and links:
content = update_links_to_attached_files(content, links)
- with open(out_filename, 'w', encoding='utf-8') as fs:
+ with open(out_filename, "w", encoding="utf-8") as fs:
fs.write(header + content)
if posts_require_pandoc:
- logger.error("Pandoc must be installed to import the following posts:"
- "\n {}".format("\n ".join(posts_require_pandoc)))
+ logger.error(
+ "Pandoc must be installed to import the following posts:" "\n {}".format(
+ "\n ".join(posts_require_pandoc)
+ )
+ )
if wp_attach and attachments and None in attachments:
print("downloading attachments that don't have a parent post")
@@ -856,111 +972,136 @@ def fields2pelican(
def main():
parser = argparse.ArgumentParser(
description="Transform feed, Blogger, Dotclear, Tumblr, or "
- "WordPress files into reST (rst) or Markdown (md) files. "
- "Be sure to have pandoc installed.",
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ "WordPress files into reST (rst) or Markdown (md) files. "
+ "Be sure to have pandoc installed.",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument(dest="input", help="The input file to read")
parser.add_argument(
- dest='input', help='The input file to read')
- parser.add_argument(
- '--blogger', action='store_true', dest='blogger',
- help='Blogger XML export')
+ "--blogger", action="store_true", dest="blogger", help="Blogger XML export"
+ )
parser.add_argument(
- '--dotclear', action='store_true', dest='dotclear',
- help='Dotclear export')
+ "--dotclear", action="store_true", dest="dotclear", help="Dotclear export"
+ )
parser.add_argument(
- '--tumblr', action='store_true', dest='tumblr',
- help='Tumblr export')
+ "--tumblr", action="store_true", dest="tumblr", help="Tumblr export"
+ )
parser.add_argument(
- '--wpfile', action='store_true', dest='wpfile',
- help='Wordpress XML export')
+ "--wpfile", action="store_true", dest="wpfile", help="Wordpress XML export"
+ )
parser.add_argument(
- '--feed', action='store_true', dest='feed',
- help='Feed to parse')
+ "--feed", action="store_true", dest="feed", help="Feed to parse"
+ )
parser.add_argument(
- '-o', '--output', dest='output', default='content',
- help='Output path')
+ "-o", "--output", dest="output", default="content", help="Output path"
+ )
parser.add_argument(
- '-m', '--markup', dest='markup', default='rst',
- help='Output markup format (supports rst & markdown)')
+ "-m",
+ "--markup",
+ dest="markup",
+ default="rst",
+ help="Output markup format (supports rst & markdown)",
+ )
parser.add_argument(
- '--dir-cat', action='store_true', dest='dircat',
- help='Put files in directories with categories name')
+ "--dir-cat",
+ action="store_true",
+ dest="dircat",
+ help="Put files in directories with categories name",
+ )
parser.add_argument(
- '--dir-page', action='store_true', dest='dirpage',
- help=('Put files recognised as pages in "pages/" sub-directory'
- ' (blogger and wordpress import only)'))
+ "--dir-page",
+ action="store_true",
+ dest="dirpage",
+ help=(
+ 'Put files recognised as pages in "pages/" sub-directory'
+ " (blogger and wordpress import only)"
+ ),
+ )
parser.add_argument(
- '--filter-author', dest='author',
- help='Import only post from the specified author')
+ "--filter-author",
+ dest="author",
+ help="Import only post from the specified author",
+ )
parser.add_argument(
- '--strip-raw', action='store_true', dest='strip_raw',
+ "--strip-raw",
+ action="store_true",
+ dest="strip_raw",
help="Strip raw HTML code that can't be converted to "
- "markup such as flash embeds or iframes (wordpress import only)")
+ "markup such as flash embeds or iframes (wordpress import only)",
+ )
parser.add_argument(
- '--wp-custpost', action='store_true',
- dest='wp_custpost',
- help='Put wordpress custom post types in directories. If used with '
- '--dir-cat option directories will be created as '
- '/post_type/category/ (wordpress import only)')
+ "--wp-custpost",
+ action="store_true",
+ dest="wp_custpost",
+ help="Put wordpress custom post types in directories. If used with "
+ "--dir-cat option directories will be created as "
+ "/post_type/category/ (wordpress import only)",
+ )
parser.add_argument(
- '--wp-attach', action='store_true', dest='wp_attach',
- help='(wordpress import only) Download files uploaded to wordpress as '
- 'attachments. Files will be added to posts as a list in the post '
- 'header. All files will be downloaded, even if '
- "they aren't associated with a post. Files will be downloaded "
- 'with their original path inside the output directory. '
- 'e.g. output/wp-uploads/date/postname/file.jpg '
- '-- Requires an internet connection --')
+ "--wp-attach",
+ action="store_true",
+ dest="wp_attach",
+ help="(wordpress import only) Download files uploaded to wordpress as "
+ "attachments. Files will be added to posts as a list in the post "
+ "header. All files will be downloaded, even if "
+ "they aren't associated with a post. Files will be downloaded "
+ "with their original path inside the output directory. "
+ "e.g. output/wp-uploads/date/postname/file.jpg "
+ "-- Requires an internet connection --",
+ )
parser.add_argument(
- '--disable-slugs', action='store_true',
- dest='disable_slugs',
- help='Disable storing slugs from imported posts within output. '
- 'With this disabled, your Pelican URLs may not be consistent '
- 'with your original posts.')
+ "--disable-slugs",
+ action="store_true",
+ dest="disable_slugs",
+ help="Disable storing slugs from imported posts within output. "
+ "With this disabled, your Pelican URLs may not be consistent "
+ "with your original posts.",
+ )
parser.add_argument(
- '-b', '--blogname', dest='blogname',
- help="Blog name (Tumblr import only)")
+ "-b", "--blogname", dest="blogname", help="Blog name (Tumblr import only)"
+ )
args = parser.parse_args()
input_type = None
if args.blogger:
- input_type = 'blogger'
+ input_type = "blogger"
elif args.dotclear:
- input_type = 'dotclear'
+ input_type = "dotclear"
elif args.tumblr:
- input_type = 'tumblr'
+ input_type = "tumblr"
elif args.wpfile:
- input_type = 'wordpress'
+ input_type = "wordpress"
elif args.feed:
- input_type = 'feed'
+ input_type = "feed"
else:
- error = ('You must provide either --blogger, --dotclear, '
- '--tumblr, --wpfile or --feed options')
+ error = (
+ "You must provide either --blogger, --dotclear, "
+ "--tumblr, --wpfile or --feed options"
+ )
exit(error)
if not os.path.exists(args.output):
try:
os.mkdir(args.output)
except OSError:
- error = 'Unable to create the output folder: ' + args.output
+ error = "Unable to create the output folder: " + args.output
exit(error)
- if args.wp_attach and input_type != 'wordpress':
- error = ('You must be importing a wordpress xml '
- 'to use the --wp-attach option')
+ if args.wp_attach and input_type != "wordpress":
+ error = "You must be importing a wordpress xml " "to use the --wp-attach option"
exit(error)
- if input_type == 'blogger':
+ if input_type == "blogger":
fields = blogger2fields(args.input)
- elif input_type == 'dotclear':
+ elif input_type == "dotclear":
fields = dc2fields(args.input)
- elif input_type == 'tumblr':
+ elif input_type == "tumblr":
fields = tumblr2fields(args.input, args.blogname)
- elif input_type == 'wordpress':
+ elif input_type == "wordpress":
fields = wp2fields(args.input, args.wp_custpost or False)
- elif input_type == 'feed':
+ elif input_type == "feed":
fields = feed2fields(args.input)
if args.wp_attach:
@@ -970,12 +1111,16 @@ def main():
# init logging
init()
- fields2pelican(fields, args.markup, args.output,
- dircat=args.dircat or False,
- dirpage=args.dirpage or False,
- strip_raw=args.strip_raw or False,
- disable_slugs=args.disable_slugs or False,
- filter_author=args.author,
- wp_custpost=args.wp_custpost or False,
- wp_attach=args.wp_attach or False,
- attachments=attachments or None)
+ fields2pelican(
+ fields,
+ args.markup,
+ args.output,
+ dircat=args.dircat or False,
+ dirpage=args.dirpage or False,
+ strip_raw=args.strip_raw or False,
+ disable_slugs=args.disable_slugs or False,
+ filter_author=args.author,
+ wp_custpost=args.wp_custpost or False,
+ wp_attach=args.wp_attach or False,
+ attachments=attachments or None,
+ )
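
For a sense of what the importers feed downstream, the field tuples yielded above are rendered into file headers by helpers such as build_markdown_header; a small usage sketch with made-up values:

    from pelican.tools.pelican_import import build_markdown_header

    header = build_markdown_header(
        title="Hello World",
        date="2023-05-01 10:00",
        author="Alice",
        categories=["Misc"],
        tags=["pelican", "import"],
        slug="hello-world",
    )
    print(header)
    # Title: Hello World
    # Date: 2023-05-01 10:00
    # Author: Alice
    # Category: Misc
    # Tags: pelican, import
    # Slug: hello-world
    # (plus a trailing blank line separating the header from the content)
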
diff --git a/pelican/tools/pelican_quickstart.py b/pelican/tools/pelican_quickstart.py
--- a/pelican/tools/pelican_quickstart.py
+++ b/pelican/tools/pelican_quickstart.py
@@ -19,6 +19,7 @@
try:
import tzlocal
+
if hasattr(tzlocal.get_localzone(), "zone"):
_DEFAULT_TIMEZONE = tzlocal.get_localzone().zone
else:
@@ -28,55 +29,51 @@
from pelican import __version__
-locale.setlocale(locale.LC_ALL, '')
+locale.setlocale(locale.LC_ALL, "")
try:
_DEFAULT_LANGUAGE = locale.getlocale()[0]
except ValueError:
# Don't fail on macosx: "unknown locale: UTF-8"
_DEFAULT_LANGUAGE = None
if _DEFAULT_LANGUAGE is None:
- _DEFAULT_LANGUAGE = 'en'
+ _DEFAULT_LANGUAGE = "en"
else:
- _DEFAULT_LANGUAGE = _DEFAULT_LANGUAGE.split('_')[0]
+ _DEFAULT_LANGUAGE = _DEFAULT_LANGUAGE.split("_")[0]
-_TEMPLATES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
- "templates")
+_TEMPLATES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
_jinja_env = Environment(
loader=FileSystemLoader(_TEMPLATES_DIR),
trim_blocks=True,
)
-_GITHUB_PAGES_BRANCHES = {
- 'personal': 'main',
- 'project': 'gh-pages'
-}
+_GITHUB_PAGES_BRANCHES = {"personal": "main", "project": "gh-pages"}
CONF = {
- 'pelican': 'pelican',
- 'pelicanopts': '',
- 'basedir': os.curdir,
- 'ftp_host': 'localhost',
- 'ftp_user': 'anonymous',
- 'ftp_target_dir': '/',
- 'ssh_host': 'localhost',
- 'ssh_port': 22,
- 'ssh_user': 'root',
- 'ssh_target_dir': '/var/www',
- 's3_bucket': 'my_s3_bucket',
- 'cloudfiles_username': 'my_rackspace_username',
- 'cloudfiles_api_key': 'my_rackspace_api_key',
- 'cloudfiles_container': 'my_cloudfiles_container',
- 'dropbox_dir': '~/Dropbox/Public/',
- 'github_pages_branch': _GITHUB_PAGES_BRANCHES['project'],
- 'default_pagination': 10,
- 'siteurl': '',
- 'lang': _DEFAULT_LANGUAGE,
- 'timezone': _DEFAULT_TIMEZONE
+ "pelican": "pelican",
+ "pelicanopts": "",
+ "basedir": os.curdir,
+ "ftp_host": "localhost",
+ "ftp_user": "anonymous",
+ "ftp_target_dir": "/",
+ "ssh_host": "localhost",
+ "ssh_port": 22,
+ "ssh_user": "root",
+ "ssh_target_dir": "/var/www",
+ "s3_bucket": "my_s3_bucket",
+ "cloudfiles_username": "my_rackspace_username",
+ "cloudfiles_api_key": "my_rackspace_api_key",
+ "cloudfiles_container": "my_cloudfiles_container",
+ "dropbox_dir": "~/Dropbox/Public/",
+ "github_pages_branch": _GITHUB_PAGES_BRANCHES["project"],
+ "default_pagination": 10,
+ "siteurl": "",
+ "lang": _DEFAULT_LANGUAGE,
+ "timezone": _DEFAULT_TIMEZONE,
}
# url for list of valid timezones
-_TZ_URL = 'https://en.wikipedia.org/wiki/List_of_tz_database_time_zones'
+_TZ_URL = "https://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
# Create a 'marked' default path, to determine if someone has supplied
@@ -90,12 +87,12 @@ class _DEFAULT_PATH_TYPE(str):
def ask(question, answer=str, default=None, length=None):
if answer == str:
- r = ''
+ r = ""
while True:
if default:
- r = input('> {} [{}] '.format(question, default))
+ r = input("> {} [{}] ".format(question, default))
else:
- r = input('> {} '.format(question))
+ r = input("> {} ".format(question))
r = r.strip()
@@ -104,10 +101,10 @@ def ask(question, answer=str, default=None, length=None):
r = default
break
else:
- print('You must enter something')
+ print("You must enter something")
else:
if length and len(r) != length:
- print('Entry must be {} characters long'.format(length))
+ print("Entry must be {} characters long".format(length))
else:
break
@@ -117,18 +114,18 @@ def ask(question, answer=str, default=None, length=None):
r = None
while True:
if default is True:
- r = input('> {} (Y/n) '.format(question))
+ r = input("> {} (Y/n) ".format(question))
elif default is False:
- r = input('> {} (y/N) '.format(question))
+ r = input("> {} (y/N) ".format(question))
else:
- r = input('> {} (y/n) '.format(question))
+ r = input("> {} (y/n) ".format(question))
r = r.strip().lower()
- if r in ('y', 'yes'):
+ if r in ("y", "yes"):
r = True
break
- elif r in ('n', 'no'):
+ elif r in ("n", "no"):
r = False
break
elif not r:
@@ -141,9 +138,9 @@ def ask(question, answer=str, default=None, length=None):
r = None
while True:
if default:
- r = input('> {} [{}] '.format(question, default))
+ r = input("> {} [{}] ".format(question, default))
else:
- r = input('> {} '.format(question))
+ r = input("> {} ".format(question))
r = r.strip()
@@ -155,11 +152,10 @@ def ask(question, answer=str, default=None, length=None):
r = int(r)
break
except ValueError:
- print('You must enter an integer')
+ print("You must enter an integer")
return r
else:
- raise NotImplementedError(
- 'Argument `answer` must be str, bool, or integer')
+ raise NotImplementedError("Argument `answer` must be str, bool, or integer")
def ask_timezone(question, default, tzurl):
@@ -178,162 +174,227 @@ def ask_timezone(question, default, tzurl):
def render_jinja_template(tmpl_name: str, tmpl_vars: Mapping, target_path: str):
try:
- with open(os.path.join(CONF['basedir'], target_path),
- 'w', encoding='utf-8') as fd:
+ with open(
+ os.path.join(CONF["basedir"], target_path), "w", encoding="utf-8"
+ ) as fd:
_template = _jinja_env.get_template(tmpl_name)
fd.write(_template.render(**tmpl_vars))
except OSError as e:
- print('Error: {}'.format(e))
+ print("Error: {}".format(e))
def main():
parser = argparse.ArgumentParser(
description="A kickstarter for Pelican",
- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- parser.add_argument('-p', '--path', default=_DEFAULT_PATH,
- help="The path to generate the blog into")
- parser.add_argument('-t', '--title', metavar="title",
- help='Set the title of the website')
- parser.add_argument('-a', '--author', metavar="author",
- help='Set the author name of the website')
- parser.add_argument('-l', '--lang', metavar="lang",
- help='Set the default web site language')
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument(
+ "-p", "--path", default=_DEFAULT_PATH, help="The path to generate the blog into"
+ )
+ parser.add_argument(
+ "-t", "--title", metavar="title", help="Set the title of the website"
+ )
+ parser.add_argument(
+ "-a", "--author", metavar="author", help="Set the author name of the website"
+ )
+ parser.add_argument(
+ "-l", "--lang", metavar="lang", help="Set the default web site language"
+ )
args = parser.parse_args()
- print('''Welcome to pelican-quickstart v{v}.
+ print(
+ """Welcome to pelican-quickstart v{v}.
This script will help you create a new Pelican-based website.
Please answer the following questions so this script can generate the files
needed by Pelican.
- '''.format(v=__version__))
+ """.format(v=__version__)
+ )
- project = os.path.join(
- os.environ.get('VIRTUAL_ENV', os.curdir), '.project')
- no_path_was_specified = hasattr(args.path, 'is_default_path')
+ project = os.path.join(os.environ.get("VIRTUAL_ENV", os.curdir), ".project")
+ no_path_was_specified = hasattr(args.path, "is_default_path")
if os.path.isfile(project) and no_path_was_specified:
- CONF['basedir'] = open(project).read().rstrip("\n")
- print('Using project associated with current virtual environment. '
- 'Will save to:\n%s\n' % CONF['basedir'])
+ CONF["basedir"] = open(project).read().rstrip("\n")
+ print(
+ "Using project associated with current virtual environment. "
+ "Will save to:\n%s\n" % CONF["basedir"]
+ )
else:
- CONF['basedir'] = os.path.abspath(os.path.expanduser(
- ask('Where do you want to create your new web site?',
- answer=str, default=args.path)))
-
- CONF['sitename'] = ask('What will be the title of this web site?',
- answer=str, default=args.title)
- CONF['author'] = ask('Who will be the author of this web site?',
- answer=str, default=args.author)
- CONF['lang'] = ask('What will be the default language of this web site?',
- str, args.lang or CONF['lang'], 2)
-
- if ask('Do you want to specify a URL prefix? e.g., https://example.com ',
- answer=bool, default=True):
- CONF['siteurl'] = ask('What is your URL prefix? (see '
- 'above example; no trailing slash)',
- str, CONF['siteurl'])
-
- CONF['with_pagination'] = ask('Do you want to enable article pagination?',
- bool, bool(CONF['default_pagination']))
-
- if CONF['with_pagination']:
- CONF['default_pagination'] = ask('How many articles per page '
- 'do you want?',
- int, CONF['default_pagination'])
+ CONF["basedir"] = os.path.abspath(
+ os.path.expanduser(
+ ask(
+ "Where do you want to create your new web site?",
+ answer=str,
+ default=args.path,
+ )
+ )
+ )
+
+ CONF["sitename"] = ask(
+ "What will be the title of this web site?", answer=str, default=args.title
+ )
+ CONF["author"] = ask(
+ "Who will be the author of this web site?", answer=str, default=args.author
+ )
+ CONF["lang"] = ask(
+ "What will be the default language of this web site?",
+ str,
+ args.lang or CONF["lang"],
+ 2,
+ )
+
+ if ask(
+ "Do you want to specify a URL prefix? e.g., https://example.com ",
+ answer=bool,
+ default=True,
+ ):
+ CONF["siteurl"] = ask(
+ "What is your URL prefix? (see " "above example; no trailing slash)",
+ str,
+ CONF["siteurl"],
+ )
+
+ CONF["with_pagination"] = ask(
+ "Do you want to enable article pagination?",
+ bool,
+ bool(CONF["default_pagination"]),
+ )
+
+ if CONF["with_pagination"]:
+ CONF["default_pagination"] = ask(
+ "How many articles per page " "do you want?",
+ int,
+ CONF["default_pagination"],
+ )
else:
- CONF['default_pagination'] = False
+ CONF["default_pagination"] = False
- CONF['timezone'] = ask_timezone('What is your time zone?',
- CONF['timezone'], _TZ_URL)
+ CONF["timezone"] = ask_timezone(
+ "What is your time zone?", CONF["timezone"], _TZ_URL
+ )
- automation = ask('Do you want to generate a tasks.py/Makefile '
- 'to automate generation and publishing?', bool, True)
+ automation = ask(
+ "Do you want to generate a tasks.py/Makefile "
+ "to automate generation and publishing?",
+ bool,
+ True,
+ )
if automation:
- if ask('Do you want to upload your website using FTP?',
- answer=bool, default=False):
- CONF['ftp'] = True,
- CONF['ftp_host'] = ask('What is the hostname of your FTP server?',
- str, CONF['ftp_host'])
- CONF['ftp_user'] = ask('What is your username on that server?',
- str, CONF['ftp_user'])
- CONF['ftp_target_dir'] = ask('Where do you want to put your '
- 'web site on that server?',
- str, CONF['ftp_target_dir'])
- if ask('Do you want to upload your website using SSH?',
- answer=bool, default=False):
- CONF['ssh'] = True,
- CONF['ssh_host'] = ask('What is the hostname of your SSH server?',
- str, CONF['ssh_host'])
- CONF['ssh_port'] = ask('What is the port of your SSH server?',
- int, CONF['ssh_port'])
- CONF['ssh_user'] = ask('What is your username on that server?',
- str, CONF['ssh_user'])
- CONF['ssh_target_dir'] = ask('Where do you want to put your '
- 'web site on that server?',
- str, CONF['ssh_target_dir'])
-
- if ask('Do you want to upload your website using Dropbox?',
- answer=bool, default=False):
- CONF['dropbox'] = True,
- CONF['dropbox_dir'] = ask('Where is your Dropbox directory?',
- str, CONF['dropbox_dir'])
-
- if ask('Do you want to upload your website using S3?',
- answer=bool, default=False):
- CONF['s3'] = True,
- CONF['s3_bucket'] = ask('What is the name of your S3 bucket?',
- str, CONF['s3_bucket'])
-
- if ask('Do you want to upload your website using '
- 'Rackspace Cloud Files?', answer=bool, default=False):
- CONF['cloudfiles'] = True,
- CONF['cloudfiles_username'] = ask('What is your Rackspace '
- 'Cloud username?', str,
- CONF['cloudfiles_username'])
- CONF['cloudfiles_api_key'] = ask('What is your Rackspace '
- 'Cloud API key?', str,
- CONF['cloudfiles_api_key'])
- CONF['cloudfiles_container'] = ask('What is the name of your '
- 'Cloud Files container?',
- str,
- CONF['cloudfiles_container'])
-
- if ask('Do you want to upload your website using GitHub Pages?',
- answer=bool, default=False):
- CONF['github'] = True,
- if ask('Is this your personal page (username.github.io)?',
- answer=bool, default=False):
- CONF['github_pages_branch'] = \
- _GITHUB_PAGES_BRANCHES['personal']
+ if ask(
+ "Do you want to upload your website using FTP?", answer=bool, default=False
+ ):
+ CONF["ftp"] = (True,)
+ CONF["ftp_host"] = ask(
+ "What is the hostname of your FTP server?", str, CONF["ftp_host"]
+ )
+ CONF["ftp_user"] = ask(
+ "What is your username on that server?", str, CONF["ftp_user"]
+ )
+ CONF["ftp_target_dir"] = ask(
+ "Where do you want to put your " "web site on that server?",
+ str,
+ CONF["ftp_target_dir"],
+ )
+ if ask(
+ "Do you want to upload your website using SSH?", answer=bool, default=False
+ ):
+ CONF["ssh"] = (True,)
+ CONF["ssh_host"] = ask(
+ "What is the hostname of your SSH server?", str, CONF["ssh_host"]
+ )
+ CONF["ssh_port"] = ask(
+ "What is the port of your SSH server?", int, CONF["ssh_port"]
+ )
+ CONF["ssh_user"] = ask(
+ "What is your username on that server?", str, CONF["ssh_user"]
+ )
+ CONF["ssh_target_dir"] = ask(
+ "Where do you want to put your " "web site on that server?",
+ str,
+ CONF["ssh_target_dir"],
+ )
+
+ if ask(
+ "Do you want to upload your website using Dropbox?",
+ answer=bool,
+ default=False,
+ ):
+ CONF["dropbox"] = (True,)
+ CONF["dropbox_dir"] = ask(
+ "Where is your Dropbox directory?", str, CONF["dropbox_dir"]
+ )
+
+ if ask(
+ "Do you want to upload your website using S3?", answer=bool, default=False
+ ):
+ CONF["s3"] = (True,)
+ CONF["s3_bucket"] = ask(
+ "What is the name of your S3 bucket?", str, CONF["s3_bucket"]
+ )
+
+ if ask(
+ "Do you want to upload your website using " "Rackspace Cloud Files?",
+ answer=bool,
+ default=False,
+ ):
+ CONF["cloudfiles"] = (True,)
+ CONF["cloudfiles_username"] = ask(
+ "What is your Rackspace " "Cloud username?",
+ str,
+ CONF["cloudfiles_username"],
+ )
+ CONF["cloudfiles_api_key"] = ask(
+ "What is your Rackspace " "Cloud API key?",
+ str,
+ CONF["cloudfiles_api_key"],
+ )
+ CONF["cloudfiles_container"] = ask(
+ "What is the name of your " "Cloud Files container?",
+ str,
+ CONF["cloudfiles_container"],
+ )
+
+ if ask(
+ "Do you want to upload your website using GitHub Pages?",
+ answer=bool,
+ default=False,
+ ):
+ CONF["github"] = (True,)
+ if ask(
+ "Is this your personal page (username.github.io)?",
+ answer=bool,
+ default=False,
+ ):
+ CONF["github_pages_branch"] = _GITHUB_PAGES_BRANCHES["personal"]
else:
- CONF['github_pages_branch'] = \
- _GITHUB_PAGES_BRANCHES['project']
+ CONF["github_pages_branch"] = _GITHUB_PAGES_BRANCHES["project"]
try:
- os.makedirs(os.path.join(CONF['basedir'], 'content'))
+ os.makedirs(os.path.join(CONF["basedir"], "content"))
except OSError as e:
- print('Error: {}'.format(e))
+ print("Error: {}".format(e))
try:
- os.makedirs(os.path.join(CONF['basedir'], 'output'))
+ os.makedirs(os.path.join(CONF["basedir"], "output"))
except OSError as e:
- print('Error: {}'.format(e))
+ print("Error: {}".format(e))
conf_python = dict()
for key, value in CONF.items():
conf_python[key] = repr(value)
- render_jinja_template('pelicanconf.py.jinja2', conf_python, 'pelicanconf.py')
+ render_jinja_template("pelicanconf.py.jinja2", conf_python, "pelicanconf.py")
- render_jinja_template('publishconf.py.jinja2', CONF, 'publishconf.py')
+ render_jinja_template("publishconf.py.jinja2", CONF, "publishconf.py")
if automation:
- render_jinja_template('tasks.py.jinja2', CONF, 'tasks.py')
- render_jinja_template('Makefile.jinja2', CONF, 'Makefile')
+ render_jinja_template("tasks.py.jinja2", CONF, "tasks.py")
+ render_jinja_template("Makefile.jinja2", CONF, "Makefile")
- print('Done. Your new project is available at %s' % CONF['basedir'])
+ print("Done. Your new project is available at %s" % CONF["basedir"])
if __name__ == "__main__":
diff --git a/pelican/tools/pelican_themes.py b/pelican/tools/pelican_themes.py
--- a/pelican/tools/pelican_themes.py
+++ b/pelican/tools/pelican_themes.py
@@ -8,7 +8,7 @@
def err(msg, die=None):
"""Print an error message and exits if an exit code is given"""
- sys.stderr.write(msg + '\n')
+ sys.stderr.write(msg + "\n")
if die:
sys.exit(die if isinstance(die, int) else 1)
@@ -16,62 +16,96 @@ def err(msg, die=None):
try:
import pelican
except ImportError:
- err('Cannot import pelican.\nYou must '
- 'install Pelican in order to run this script.',
- -1)
+ err(
+ "Cannot import pelican.\nYou must "
+ "install Pelican in order to run this script.",
+ -1,
+ )
global _THEMES_PATH
_THEMES_PATH = os.path.join(
- os.path.dirname(
- os.path.abspath(pelican.__file__)
- ),
- 'themes'
+ os.path.dirname(os.path.abspath(pelican.__file__)), "themes"
)
-__version__ = '0.2'
-_BUILTIN_THEMES = ['simple', 'notmyidea']
+__version__ = "0.2"
+_BUILTIN_THEMES = ["simple", "notmyidea"]
def main():
"""Main function"""
- parser = argparse.ArgumentParser(
- description="""Install themes for Pelican""")
+ parser = argparse.ArgumentParser(description="""Install themes for Pelican""")
excl = parser.add_mutually_exclusive_group()
excl.add_argument(
- '-l', '--list', dest='action', action="store_const", const='list',
- help="Show the themes already installed and exit")
+ "-l",
+ "--list",
+ dest="action",
+ action="store_const",
+ const="list",
+ help="Show the themes already installed and exit",
+ )
excl.add_argument(
- '-p', '--path', dest='action', action="store_const", const='path',
- help="Show the themes path and exit")
+ "-p",
+ "--path",
+ dest="action",
+ action="store_const",
+ const="path",
+ help="Show the themes path and exit",
+ )
excl.add_argument(
- '-V', '--version', action='version',
- version='pelican-themes v{}'.format(__version__),
- help='Print the version of this script')
+ "-V",
+ "--version",
+ action="version",
+ version="pelican-themes v{}".format(__version__),
+ help="Print the version of this script",
+ )
parser.add_argument(
- '-i', '--install', dest='to_install', nargs='+', metavar="theme path",
- help='The themes to install')
+ "-i",
+ "--install",
+ dest="to_install",
+ nargs="+",
+ metavar="theme path",
+ help="The themes to install",
+ )
parser.add_argument(
- '-r', '--remove', dest='to_remove', nargs='+', metavar="theme name",
- help='The themes to remove')
+ "-r",
+ "--remove",
+ dest="to_remove",
+ nargs="+",
+ metavar="theme name",
+ help="The themes to remove",
+ )
parser.add_argument(
- '-U', '--upgrade', dest='to_upgrade', nargs='+',
- metavar="theme path", help='The themes to upgrade')
+ "-U",
+ "--upgrade",
+ dest="to_upgrade",
+ nargs="+",
+ metavar="theme path",
+ help="The themes to upgrade",
+ )
parser.add_argument(
- '-s', '--symlink', dest='to_symlink', nargs='+', metavar="theme path",
+ "-s",
+ "--symlink",
+ dest="to_symlink",
+ nargs="+",
+ metavar="theme path",
help="Same as `--install', but create a symbolic link instead of "
- "copying the theme. Useful for theme development")
+ "copying the theme. Useful for theme development",
+ )
parser.add_argument(
- '-c', '--clean', dest='clean', action="store_true",
- help="Remove the broken symbolic links of the theme path")
+ "-c",
+ "--clean",
+ dest="clean",
+ action="store_true",
+ help="Remove the broken symbolic links of the theme path",
+ )
parser.add_argument(
- '-v', '--verbose', dest='verbose',
- action="store_true",
- help="Verbose output")
+ "-v", "--verbose", dest="verbose", action="store_true", help="Verbose output"
+ )
args = parser.parse_args()
@@ -79,46 +113,46 @@ def main():
to_sym = args.to_symlink or args.clean
if args.action:
- if args.action == 'list':
+ if args.action == "list":
list_themes(args.verbose)
- elif args.action == 'path':
+ elif args.action == "path":
print(_THEMES_PATH)
elif to_install or args.to_remove or to_sym:
if args.to_remove:
if args.verbose:
- print('Removing themes...')
+ print("Removing themes...")
for i in args.to_remove:
remove(i, v=args.verbose)
if args.to_install:
if args.verbose:
- print('Installing themes...')
+ print("Installing themes...")
for i in args.to_install:
install(i, v=args.verbose)
if args.to_upgrade:
if args.verbose:
- print('Upgrading themes...')
+ print("Upgrading themes...")
for i in args.to_upgrade:
install(i, v=args.verbose, u=True)
if args.to_symlink:
if args.verbose:
- print('Linking themes...')
+ print("Linking themes...")
for i in args.to_symlink:
symlink(i, v=args.verbose)
if args.clean:
if args.verbose:
- print('Cleaning the themes directory...')
+ print("Cleaning the themes directory...")
clean(v=args.verbose)
else:
- print('No argument given... exiting.')
+ print("No argument given... exiting.")
def themes():
@@ -142,7 +176,7 @@ def list_themes(v=False):
if v:
print(theme_path + (" (symbolic link to `" + link_target + "')"))
else:
- print(theme_path + '@')
+ print(theme_path + "@")
else:
print(theme_path)
@@ -150,51 +184,52 @@ def list_themes(v=False):
def remove(theme_name, v=False):
"""Removes a theme"""
- theme_name = theme_name.replace('/', '')
+ theme_name = theme_name.replace("/", "")
target = os.path.join(_THEMES_PATH, theme_name)
if theme_name in _BUILTIN_THEMES:
- err(theme_name + ' is a builtin theme.\n'
- 'You cannot remove a builtin theme with this script, '
- 'remove it by hand if you want.')
+ err(
+ theme_name + " is a builtin theme.\n"
+ "You cannot remove a builtin theme with this script, "
+ "remove it by hand if you want."
+ )
elif os.path.islink(target):
if v:
- print('Removing link `' + target + "'")
+ print("Removing link `" + target + "'")
os.remove(target)
elif os.path.isdir(target):
if v:
- print('Removing directory `' + target + "'")
+ print("Removing directory `" + target + "'")
shutil.rmtree(target)
elif os.path.exists(target):
- err(target + ' : not a valid theme')
+ err(target + " : not a valid theme")
else:
- err(target + ' : no such file or directory')
+ err(target + " : no such file or directory")
def install(path, v=False, u=False):
"""Installs a theme"""
if not os.path.exists(path):
- err(path + ' : no such file or directory')
+ err(path + " : no such file or directory")
elif not os.path.isdir(path):
- err(path + ' : not a directory')
+ err(path + " : not a directory")
else:
theme_name = os.path.basename(os.path.normpath(path))
theme_path = os.path.join(_THEMES_PATH, theme_name)
exists = os.path.exists(theme_path)
if exists and not u:
- err(path + ' : already exists')
+ err(path + " : already exists")
elif exists:
remove(theme_name, v)
install(path, v)
else:
if v:
- print("Copying '{p}' to '{t}' ...".format(p=path,
- t=theme_path))
+ print("Copying '{p}' to '{t}' ...".format(p=path, t=theme_path))
try:
shutil.copytree(path, theme_path)
try:
- if os.name == 'posix':
+ if os.name == "posix":
for root, dirs, files in os.walk(theme_path):
for d in dirs:
dname = os.path.join(root, d)
@@ -203,35 +238,41 @@ def install(path, v=False, u=False):
fname = os.path.join(root, f)
os.chmod(fname, 420) # 0o644
except OSError as e:
- err("Cannot change permissions of files "
- "or directory in `{r}':\n{e}".format(r=theme_path,
- e=str(e)),
- die=False)
+ err(
+ "Cannot change permissions of files "
+ "or directory in `{r}':\n{e}".format(r=theme_path, e=str(e)),
+ die=False,
+ )
except Exception as e:
- err("Cannot copy `{p}' to `{t}':\n{e}".format(
- p=path, t=theme_path, e=str(e)))
+ err(
+ "Cannot copy `{p}' to `{t}':\n{e}".format(
+ p=path, t=theme_path, e=str(e)
+ )
+ )
def symlink(path, v=False):
"""Symbolically link a theme"""
if not os.path.exists(path):
- err(path + ' : no such file or directory')
+ err(path + " : no such file or directory")
elif not os.path.isdir(path):
- err(path + ' : not a directory')
+ err(path + " : not a directory")
else:
theme_name = os.path.basename(os.path.normpath(path))
theme_path = os.path.join(_THEMES_PATH, theme_name)
if os.path.exists(theme_path):
- err(path + ' : already exists')
+ err(path + " : already exists")
else:
if v:
- print("Linking `{p}' to `{t}' ...".format(
- p=path, t=theme_path))
+ print("Linking `{p}' to `{t}' ...".format(p=path, t=theme_path))
try:
os.symlink(path, theme_path)
except Exception as e:
- err("Cannot link `{p}' to `{t}':\n{e}".format(
- p=path, t=theme_path, e=str(e)))
+ err(
+ "Cannot link `{p}' to `{t}':\n{e}".format(
+ p=path, t=theme_path, e=str(e)
+ )
+ )
def is_broken_link(path):
@@ -247,11 +288,11 @@ def clean(v=False):
path = os.path.join(_THEMES_PATH, path)
if os.path.islink(path) and is_broken_link(path):
if v:
- print('Removing {}'.format(path))
+ print("Removing {}".format(path))
try:
os.remove(path)
except OSError:
- print('Error: cannot remove {}'.format(path))
+ print("Error: cannot remove {}".format(path))
else:
c += 1
diff --git a/pelican/urlwrappers.py b/pelican/urlwrappers.py
--- a/pelican/urlwrappers.py
+++ b/pelican/urlwrappers.py
@@ -31,17 +31,16 @@ def name(self, name):
@property
def slug(self):
if self._slug is None:
- class_key = '{}_REGEX_SUBSTITUTIONS'.format(
- self.__class__.__name__.upper())
+ class_key = "{}_REGEX_SUBSTITUTIONS".format(self.__class__.__name__.upper())
regex_subs = self.settings.get(
- class_key,
- self.settings.get('SLUG_REGEX_SUBSTITUTIONS', []))
- preserve_case = self.settings.get('SLUGIFY_PRESERVE_CASE', False)
+ class_key, self.settings.get("SLUG_REGEX_SUBSTITUTIONS", [])
+ )
+ preserve_case = self.settings.get("SLUGIFY_PRESERVE_CASE", False)
self._slug = slugify(
self.name,
regex_subs=regex_subs,
preserve_case=preserve_case,
- use_unicode=self.settings.get('SLUGIFY_USE_UNICODE', False)
+ use_unicode=self.settings.get("SLUGIFY_USE_UNICODE", False),
)
return self._slug
@@ -53,26 +52,26 @@ def slug(self, slug):
def as_dict(self):
d = self.__dict__
- d['name'] = self.name
- d['slug'] = self.slug
+ d["name"] = self.name
+ d["slug"] = self.slug
return d
def __hash__(self):
return hash(self.slug)
def _normalize_key(self, key):
- class_key = '{}_REGEX_SUBSTITUTIONS'.format(
- self.__class__.__name__.upper())
+ class_key = "{}_REGEX_SUBSTITUTIONS".format(self.__class__.__name__.upper())
regex_subs = self.settings.get(
- class_key,
- self.settings.get('SLUG_REGEX_SUBSTITUTIONS', []))
- use_unicode = self.settings.get('SLUGIFY_USE_UNICODE', False)
- preserve_case = self.settings.get('SLUGIFY_PRESERVE_CASE', False)
+ class_key, self.settings.get("SLUG_REGEX_SUBSTITUTIONS", [])
+ )
+ use_unicode = self.settings.get("SLUGIFY_USE_UNICODE", False)
+ preserve_case = self.settings.get("SLUGIFY_PRESERVE_CASE", False)
return slugify(
key,
regex_subs=regex_subs,
preserve_case=preserve_case,
- use_unicode=use_unicode)
+ use_unicode=use_unicode,
+ )
def __eq__(self, other):
if isinstance(other, self.__class__):
@@ -99,7 +98,7 @@ def __str__(self):
return self.name
def __repr__(self):
- return '<{} {}>'.format(type(self).__name__, repr(self._name))
+ return "<{} {}>".format(type(self).__name__, repr(self._name))
def _from_settings(self, key, get_page_name=False):
"""Returns URL information as defined in settings.
@@ -114,7 +113,7 @@ def _from_settings(self, key, get_page_name=False):
if isinstance(value, pathlib.Path):
value = str(value)
if not isinstance(value, str):
- logger.warning('%s is set to %s', setting, value)
+ logger.warning("%s is set to %s", setting, value)
return value
else:
if get_page_name:
@@ -122,10 +121,11 @@ def _from_settings(self, key, get_page_name=False):
else:
return value.format(**self.as_dict())
- page_name = property(functools.partial(_from_settings, key='URL',
- get_page_name=True))
- url = property(functools.partial(_from_settings, key='URL'))
- save_as = property(functools.partial(_from_settings, key='SAVE_AS'))
+ page_name = property(
+ functools.partial(_from_settings, key="URL", get_page_name=True)
+ )
+ url = property(functools.partial(_from_settings, key="URL"))
+ save_as = property(functools.partial(_from_settings, key="SAVE_AS"))
class Category(URLWrapper):
diff --git a/pelican/utils.py b/pelican/utils.py
--- a/pelican/utils.py
+++ b/pelican/utils.py
@@ -32,38 +32,37 @@
def sanitised_join(base_directory, *parts):
- joined = posixize_path(
- os.path.abspath(os.path.join(base_directory, *parts)))
+ joined = posixize_path(os.path.abspath(os.path.join(base_directory, *parts)))
base = posixize_path(os.path.abspath(base_directory))
if not joined.startswith(base):
raise RuntimeError(
- "Attempted to break out of output directory to {}".format(
- joined
- )
+ "Attempted to break out of output directory to {}".format(joined)
)
return joined
def strftime(date, date_format):
- '''
+ """
Enhanced replacement for built-in strftime with zero stripping
This works by 'grabbing' possible format strings (those starting with %),
formatting them with the date, stripping any leading zeros if - prefix is
used and replacing formatted output back.
- '''
+ """
+
def strip_zeros(x):
- return x.lstrip('0') or '0'
+ return x.lstrip("0") or "0"
+
# includes ISO date parameters added by Python 3.6
- c89_directives = 'aAbBcdfGHIjmMpSUuVwWxXyYzZ%'
+ c89_directives = "aAbBcdfGHIjmMpSUuVwWxXyYzZ%"
# grab candidate format options
- format_options = '%[-]?.'
+ format_options = "%[-]?."
candidates = re.findall(format_options, date_format)
# replace candidates with placeholders for later % formatting
- template = re.sub(format_options, '%s', date_format)
+ template = re.sub(format_options, "%s", date_format)
formatted_candidates = []
for candidate in candidates:
@@ -72,7 +71,7 @@ def strip_zeros(x):
# check for '-' prefix
if len(candidate) == 3:
# '-' prefix
- candidate = '%{}'.format(candidate[-1])
+ candidate = "%{}".format(candidate[-1])
conversion = strip_zeros
else:
conversion = None
@@ -95,10 +94,10 @@ def strip_zeros(x):
class SafeDatetime(datetime.datetime):
- '''Subclass of datetime that works with utf-8 format strings on PY2'''
+ """Subclass of datetime that works with utf-8 format strings on PY2"""
def strftime(self, fmt, safe=True):
- '''Uses our custom strftime if supposed to be *safe*'''
+ """Uses our custom strftime if supposed to be *safe*"""
if safe:
return strftime(self, fmt)
else:
@@ -106,22 +105,21 @@ def strftime(self, fmt, safe=True):
class DateFormatter:
- '''A date formatter object used as a jinja filter
+ """A date formatter object used as a jinja filter
Uses the `strftime` implementation and makes sure jinja uses the locale
defined in LOCALE setting
- '''
+ """
def __init__(self):
self.locale = locale.setlocale(locale.LC_TIME)
def __call__(self, date, date_format):
-
# on OSX, encoding from LC_CTYPE determines the unicode output in PY3
# make sure it's same as LC_TIME
- with temporary_locale(self.locale, locale.LC_TIME), \
- temporary_locale(self.locale, locale.LC_CTYPE):
-
+ with temporary_locale(self.locale, locale.LC_TIME), temporary_locale(
+ self.locale, locale.LC_CTYPE
+ ):
formatted = strftime(date, date_format)
return formatted
@@ -155,7 +153,7 @@ def __repr__(self):
return self.func.__doc__
def __get__(self, obj, objtype):
- '''Support instance methods.'''
+ """Support instance methods."""
fn = partial(self.__call__, obj)
fn.cache = self.cache
return fn
@@ -177,17 +175,16 @@ def __init__(self):
Note that the decorator needs a dummy method to attach to, but the
content of the dummy method is ignored.
"""
+
def _warn():
- version = '.'.join(str(x) for x in since)
- message = ['{} has been deprecated since {}'.format(old, version)]
+ version = ".".join(str(x) for x in since)
+ message = ["{} has been deprecated since {}".format(old, version)]
if remove:
- version = '.'.join(str(x) for x in remove)
- message.append(
- ' and will be removed by version {}'.format(version))
- message.append('. Use {} instead.'.format(new))
- logger.warning(''.join(message))
- logger.debug(''.join(str(x) for x
- in traceback.format_stack()))
+ version = ".".join(str(x) for x in remove)
+ message.append(" and will be removed by version {}".format(version))
+ message.append(". Use {} instead.".format(new))
+ logger.warning("".join(message))
+ logger.debug("".join(str(x) for x in traceback.format_stack()))
def fget(self):
_warn()
@@ -208,21 +205,20 @@ def get_date(string):
If no format matches the given date, raise a ValueError.
"""
- string = re.sub(' +', ' ', string)
- default = SafeDatetime.now().replace(hour=0, minute=0,
- second=0, microsecond=0)
+ string = re.sub(" +", " ", string)
+ default = SafeDatetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
try:
return dateutil.parser.parse(string, default=default)
except (TypeError, ValueError):
- raise ValueError('{!r} is not a valid date'.format(string))
+ raise ValueError("{!r} is not a valid date".format(string))
@contextmanager
-def pelican_open(filename, mode='r', strip_crs=(sys.platform == 'win32')):
+def pelican_open(filename, mode="r", strip_crs=(sys.platform == "win32")):
"""Open a file and return its content"""
# utf-8-sig will clear any BOM if present
- with open(filename, mode, encoding='utf-8-sig') as infile:
+ with open(filename, mode, encoding="utf-8-sig") as infile:
content = infile.read()
yield content
@@ -244,7 +240,7 @@ def slugify(value, regex_subs=(), preserve_case=False, use_unicode=False):
def normalize_unicode(text):
# normalize text by compatibility composition
# see: https://en.wikipedia.org/wiki/Unicode_equivalence
- return unicodedata.normalize('NFKC', text)
+ return unicodedata.normalize("NFKC", text)
# strip tags from value
value = Markup(value).striptags()
@@ -259,10 +255,8 @@ def normalize_unicode(text):
# perform regex substitutions
for src, dst in regex_subs:
value = re.sub(
- normalize_unicode(src),
- normalize_unicode(dst),
- value,
- flags=re.IGNORECASE)
+ normalize_unicode(src), normalize_unicode(dst), value, flags=re.IGNORECASE
+ )
if not preserve_case:
value = value.lower()
@@ -283,8 +277,7 @@ def copy(source, destination, ignores=None):
"""
def walk_error(err):
- logger.warning("While copying %s: %s: %s",
- source_, err.filename, err.strerror)
+ logger.warning("While copying %s: %s: %s", source_, err.filename, err.strerror)
source_ = os.path.abspath(os.path.expanduser(source))
destination_ = os.path.abspath(os.path.expanduser(destination))
@@ -292,39 +285,40 @@ def walk_error(err):
if ignores is None:
ignores = []
- if any(fnmatch.fnmatch(os.path.basename(source), ignore)
- for ignore in ignores):
- logger.info('Not copying %s due to ignores', source_)
+ if any(fnmatch.fnmatch(os.path.basename(source), ignore) for ignore in ignores):
+ logger.info("Not copying %s due to ignores", source_)
return
if os.path.isfile(source_):
dst_dir = os.path.dirname(destination_)
if not os.path.exists(dst_dir):
- logger.info('Creating directory %s', dst_dir)
+ logger.info("Creating directory %s", dst_dir)
os.makedirs(dst_dir)
- logger.info('Copying %s to %s', source_, destination_)
+ logger.info("Copying %s to %s", source_, destination_)
copy_file(source_, destination_)
elif os.path.isdir(source_):
if not os.path.exists(destination_):
- logger.info('Creating directory %s', destination_)
+ logger.info("Creating directory %s", destination_)
os.makedirs(destination_)
if not os.path.isdir(destination_):
- logger.warning('Cannot copy %s (a directory) to %s (a file)',
- source_, destination_)
+ logger.warning(
+ "Cannot copy %s (a directory) to %s (a file)", source_, destination_
+ )
return
for src_dir, subdirs, others in os.walk(source_, followlinks=True):
- dst_dir = os.path.join(destination_,
- os.path.relpath(src_dir, source_))
+ dst_dir = os.path.join(destination_, os.path.relpath(src_dir, source_))
- subdirs[:] = (s for s in subdirs if not any(fnmatch.fnmatch(s, i)
- for i in ignores))
- others[:] = (o for o in others if not any(fnmatch.fnmatch(o, i)
- for i in ignores))
+ subdirs[:] = (
+ s for s in subdirs if not any(fnmatch.fnmatch(s, i) for i in ignores)
+ )
+ others[:] = (
+ o for o in others if not any(fnmatch.fnmatch(o, i) for i in ignores)
+ )
if not os.path.isdir(dst_dir):
- logger.info('Creating directory %s', dst_dir)
+ logger.info("Creating directory %s", dst_dir)
# Parent directories are known to exist, so 'mkdir' suffices.
os.mkdir(dst_dir)
@@ -332,21 +326,24 @@ def walk_error(err):
src_path = os.path.join(src_dir, o)
dst_path = os.path.join(dst_dir, o)
if os.path.isfile(src_path):
- logger.info('Copying %s to %s', src_path, dst_path)
+ logger.info("Copying %s to %s", src_path, dst_path)
copy_file(src_path, dst_path)
else:
- logger.warning('Skipped copy %s (not a file or '
- 'directory) to %s',
- src_path, dst_path)
+ logger.warning(
+ "Skipped copy %s (not a file or " "directory) to %s",
+ src_path,
+ dst_path,
+ )
def copy_file(source, destination):
- '''Copy a file'''
+ """Copy a file"""
try:
shutil.copyfile(source, destination)
except OSError as e:
- logger.warning("A problem occurred copying file %s to %s; %s",
- source, destination, e)
+ logger.warning(
+ "A problem occurred copying file %s to %s; %s", source, destination, e
+ )
def clean_output_dir(path, retention):
@@ -367,15 +364,15 @@ def clean_output_dir(path, retention):
for filename in os.listdir(path):
file = os.path.join(path, filename)
if any(filename == retain for retain in retention):
- logger.debug("Skipping deletion; %s is on retention list: %s",
- filename, file)
+ logger.debug(
+ "Skipping deletion; %s is on retention list: %s", filename, file
+ )
elif os.path.isdir(file):
try:
shutil.rmtree(file)
logger.debug("Deleted directory %s", file)
except Exception as e:
- logger.error("Unable to delete directory %s; %s",
- file, e)
+ logger.error("Unable to delete directory %s; %s", file, e)
elif os.path.isfile(file) or os.path.islink(file):
try:
os.remove(file)
@@ -407,29 +404,31 @@ def posixize_path(rel_path):
"""Use '/' as path separator, so that source references,
like '{static}/foo/bar.jpg' or 'extras/favicon.ico',
will work on Windows as well as on Mac and Linux."""
- return rel_path.replace(os.sep, '/')
+ return rel_path.replace(os.sep, "/")
class _HTMLWordTruncator(HTMLParser):
-
- _word_regex = re.compile(r"{DBC}|(\w[\w'-]*)".format(
- # DBC means CJK-like characters. An character can stand for a word.
- DBC=("([\u4E00-\u9FFF])|" # CJK Unified Ideographs
- "([\u3400-\u4DBF])|" # CJK Unified Ideographs Extension A
- "([\uF900-\uFAFF])|" # CJK Compatibility Ideographs
- "([\U00020000-\U0002A6DF])|" # CJK Unified Ideographs Extension B
- "([\U0002F800-\U0002FA1F])|" # CJK Compatibility Ideographs Supplement
- "([\u3040-\u30FF])|" # Hiragana and Katakana
- "([\u1100-\u11FF])|" # Hangul Jamo
- "([\uAC00-\uD7FF])|" # Hangul Compatibility Jamo
- "([\u3130-\u318F])" # Hangul Syllables
- )), re.UNICODE)
- _word_prefix_regex = re.compile(r'\w', re.U)
- _singlets = ('br', 'col', 'link', 'base', 'img', 'param', 'area',
- 'hr', 'input')
+ _word_regex = re.compile(
+ r"{DBC}|(\w[\w'-]*)".format(
+            # DBC means CJK-like characters. A character can stand for a word.
+ DBC=(
+ "([\u4E00-\u9FFF])|" # CJK Unified Ideographs
+ "([\u3400-\u4DBF])|" # CJK Unified Ideographs Extension A
+ "([\uF900-\uFAFF])|" # CJK Compatibility Ideographs
+ "([\U00020000-\U0002A6DF])|" # CJK Unified Ideographs Extension B
+ "([\U0002F800-\U0002FA1F])|" # CJK Compatibility Ideographs Supplement
+ "([\u3040-\u30FF])|" # Hiragana and Katakana
+ "([\u1100-\u11FF])|" # Hangul Jamo
+ "([\uAC00-\uD7FF])|" # Hangul Compatibility Jamo
+ "([\u3130-\u318F])" # Hangul Syllables
+ )
+ ),
+ re.UNICODE,
+ )
+ _word_prefix_regex = re.compile(r"\w", re.U)
+ _singlets = ("br", "col", "link", "base", "img", "param", "area", "hr", "input")
class TruncationCompleted(Exception):
-
def __init__(self, truncate_at):
super().__init__(truncate_at)
self.truncate_at = truncate_at
@@ -455,7 +454,7 @@ def getoffset(self):
line_start = 0
lineno, line_offset = self.getpos()
for i in range(lineno - 1):
- line_start = self.rawdata.index('\n', line_start) + 1
+ line_start = self.rawdata.index("\n", line_start) + 1
return line_start + line_offset
def add_word(self, word_end):
@@ -482,7 +481,7 @@ def handle_endtag(self, tag):
else:
# SGML: An end tag closes, back to the matching start tag,
# all unclosed intervening start tags with omitted end tags
- del self.open_tags[:i + 1]
+ del self.open_tags[: i + 1]
def handle_data(self, data):
word_end = 0
@@ -531,7 +530,7 @@ def _handle_ref(self, name, char):
ref_end = offset + len(name) + 1
try:
- if self.rawdata[ref_end] == ';':
+ if self.rawdata[ref_end] == ";":
ref_end += 1
except IndexError:
# We are at the end of the string and there's no ';'
@@ -556,7 +555,7 @@ def handle_entityref(self, name):
codepoint = entities.name2codepoint[name]
char = chr(codepoint)
except KeyError:
- char = ''
+ char = ""
self._handle_ref(name, char)
def handle_charref(self, name):
@@ -567,17 +566,17 @@ def handle_charref(self, name):
`#x2014`)
"""
try:
- if name.startswith('x'):
+ if name.startswith("x"):
codepoint = int(name[1:], 16)
else:
codepoint = int(name)
char = chr(codepoint)
except (ValueError, OverflowError):
- char = ''
- self._handle_ref('#' + name, char)
+ char = ""
+ self._handle_ref("#" + name, char)
-def truncate_html_words(s, num, end_text='…'):
+def truncate_html_words(s, num, end_text="…"):
"""Truncates HTML to a certain number of words.
(not counting tags and comments). Closes opened tags if they were correctly
@@ -588,23 +587,23 @@ def truncate_html_words(s, num, end_text='…'):
"""
length = int(num)
if length <= 0:
- return ''
+ return ""
truncator = _HTMLWordTruncator(length)
truncator.feed(s)
if truncator.truncate_at is None:
return s
- out = s[:truncator.truncate_at]
+ out = s[: truncator.truncate_at]
if end_text:
- out += ' ' + end_text
+ out += " " + end_text
# Close any tags still open
for tag in truncator.open_tags:
- out += '</%s>' % tag
+ out += "</%s>" % tag
# Return string
return out
def process_translations(content_list, translation_id=None):
- """ Finds translations and returns them.
+ """Finds translations and returns them.
For each content_list item, populates the 'translations' attribute, and
returns a tuple with two lists (index, translations). Index list includes
@@ -632,19 +631,23 @@ def process_translations(content_list, translation_id=None):
try:
content_list.sort(key=attrgetter(*translation_id))
except TypeError:
- raise TypeError('Cannot unpack {}, \'translation_id\' must be falsy, a'
- ' string or a collection of strings'
- .format(translation_id))
+ raise TypeError(
+ "Cannot unpack {}, 'translation_id' must be falsy, a"
+ " string or a collection of strings".format(translation_id)
+ )
except AttributeError:
- raise AttributeError('Cannot use {} as \'translation_id\', there '
- 'appear to be items without these metadata '
- 'attributes'.format(translation_id))
+ raise AttributeError(
+ "Cannot use {} as 'translation_id', there "
+ "appear to be items without these metadata "
+ "attributes".format(translation_id)
+ )
for id_vals, items in groupby(content_list, attrgetter(*translation_id)):
# prepare warning string
id_vals = (id_vals,) if len(translation_id) == 1 else id_vals
- with_str = 'with' + ', '.join([' {} "{{}}"'] * len(translation_id))\
- .format(*translation_id).format(*id_vals)
+ with_str = "with" + ", ".join([' {} "{{}}"'] * len(translation_id)).format(
+ *translation_id
+ ).format(*id_vals)
items = list(items)
original_items = get_original_items(items, with_str)
@@ -662,24 +665,24 @@ def _warn_source_paths(msg, items, *extra):
args = [len(items)]
args.extend(extra)
args.extend(x.source_path for x in items)
- logger.warning('{}: {}'.format(msg, '\n%s' * len(items)), *args)
+ logger.warning("{}: {}".format(msg, "\n%s" * len(items)), *args)
# warn if several items have the same lang
- for lang, lang_items in groupby(items, attrgetter('lang')):
+ for lang, lang_items in groupby(items, attrgetter("lang")):
lang_items = list(lang_items)
if len(lang_items) > 1:
- _warn_source_paths('There are %s items "%s" with lang %s',
- lang_items, with_str, lang)
+ _warn_source_paths(
+ 'There are %s items "%s" with lang %s', lang_items, with_str, lang
+ )
# items with `translation` metadata will be used as translations...
candidate_items = [
- i for i in items
- if i.metadata.get('translation', 'false').lower() == 'false']
+ i for i in items if i.metadata.get("translation", "false").lower() == "false"
+ ]
# ...unless all items with that slug are translations
if not candidate_items:
- _warn_source_paths('All items ("%s") "%s" are translations',
- items, with_str)
+ _warn_source_paths('All items ("%s") "%s" are translations', items, with_str)
candidate_items = items
# find items with default language
@@ -691,13 +694,14 @@ def _warn_source_paths(msg, items, *extra):
# warn if there are several original items
if len(original_items) > 1:
- _warn_source_paths('There are %s original (not translated) items %s',
- original_items, with_str)
+ _warn_source_paths(
+ "There are %s original (not translated) items %s", original_items, with_str
+ )
return original_items
-def order_content(content_list, order_by='slug'):
- """ Sorts content.
+def order_content(content_list, order_by="slug"):
+ """Sorts content.
order_by can be a string of an attribute or sorting function. If order_by
is defined, content will be ordered by that attribute or sorting function.
@@ -713,22 +717,22 @@ def order_content(content_list, order_by='slug'):
try:
content_list.sort(key=order_by)
except Exception:
- logger.error('Error sorting with function %s', order_by)
+ logger.error("Error sorting with function %s", order_by)
elif isinstance(order_by, str):
- if order_by.startswith('reversed-'):
+ if order_by.startswith("reversed-"):
order_reversed = True
- order_by = order_by.replace('reversed-', '', 1)
+ order_by = order_by.replace("reversed-", "", 1)
else:
order_reversed = False
- if order_by == 'basename':
+ if order_by == "basename":
content_list.sort(
- key=lambda x: os.path.basename(x.source_path or ''),
- reverse=order_reversed)
+ key=lambda x: os.path.basename(x.source_path or ""),
+ reverse=order_reversed,
+ )
else:
try:
- content_list.sort(key=attrgetter(order_by),
- reverse=order_reversed)
+ content_list.sort(key=attrgetter(order_by), reverse=order_reversed)
except AttributeError:
for content in content_list:
try:
@@ -736,26 +740,31 @@ def order_content(content_list, order_by='slug'):
except AttributeError:
logger.warning(
'There is no "%s" attribute in "%s". '
- 'Defaulting to slug order.',
+ "Defaulting to slug order.",
order_by,
content.get_relative_source_path(),
extra={
- 'limit_msg': ('More files are missing '
- 'the needed attribute.')
- })
+ "limit_msg": (
+ "More files are missing "
+ "the needed attribute."
+ )
+ },
+ )
else:
logger.warning(
- 'Invalid *_ORDER_BY setting (%s). '
- 'Valid options are strings and functions.', order_by)
+ "Invalid *_ORDER_BY setting (%s). "
+ "Valid options are strings and functions.",
+ order_by,
+ )
return content_list
def wait_for_changes(settings_file, reader_class, settings):
- content_path = settings.get('PATH', '')
- theme_path = settings.get('THEME', '')
+ content_path = settings.get("PATH", "")
+ theme_path = settings.get("THEME", "")
ignore_files = set(
- fnmatch.translate(pattern) for pattern in settings.get('IGNORE_FILES', [])
+ fnmatch.translate(pattern) for pattern in settings.get("IGNORE_FILES", [])
)
candidate_paths = [
@@ -765,7 +774,7 @@ def wait_for_changes(settings_file, reader_class, settings):
]
candidate_paths.extend(
- os.path.join(content_path, path) for path in settings.get('STATIC_PATHS', [])
+ os.path.join(content_path, path) for path in settings.get("STATIC_PATHS", [])
)
watching_paths = []
@@ -778,11 +787,13 @@ def wait_for_changes(settings_file, reader_class, settings):
else:
watching_paths.append(path)
- return next(watchfiles.watch(
- *watching_paths,
- watch_filter=watchfiles.DefaultFilter(ignore_entity_patterns=ignore_files),
- rust_timeout=0
- ))
+ return next(
+ watchfiles.watch(
+ *watching_paths,
+ watch_filter=watchfiles.DefaultFilter(ignore_entity_patterns=ignore_files),
+ rust_timeout=0,
+ )
+ )
def set_date_tzinfo(d, tz_name=None):
@@ -811,7 +822,7 @@ def split_all(path):
"""
if isinstance(path, str):
components = []
- path = path.lstrip('/')
+ path = path.lstrip("/")
while path:
head, tail = os.path.split(path)
if tail:
@@ -827,32 +838,30 @@ def split_all(path):
return None
else:
raise TypeError(
- '"path" was {}, must be string, None, or pathlib.Path'.format(
- type(path)
- )
+ '"path" was {}, must be string, None, or pathlib.Path'.format(type(path))
)
def is_selected_for_writing(settings, path):
- '''Check whether path is selected for writing
+ """Check whether path is selected for writing
according to the WRITE_SELECTED list
If WRITE_SELECTED is an empty list (default),
any path is selected for writing.
- '''
- if settings['WRITE_SELECTED']:
- return path in settings['WRITE_SELECTED']
+ """
+ if settings["WRITE_SELECTED"]:
+ return path in settings["WRITE_SELECTED"]
else:
return True
def path_to_file_url(path):
- '''Convert file-system path to file:// URL'''
+ """Convert file-system path to file:// URL"""
return urllib.parse.urljoin("file://", urllib.request.pathname2url(path))
def maybe_pluralize(count, singular, plural):
- '''
+ """
Returns a formatted string containing count and plural if count is not 1
Returns count and singular if count is 1
@@ -860,22 +869,22 @@ def maybe_pluralize(count, singular, plural):
maybe_pluralize(1, 'Article', 'Articles') -> '1 Article'
maybe_pluralize(2, 'Article', 'Articles') -> '2 Articles'
- '''
+ """
selection = plural
if count == 1:
selection = singular
- return '{} {}'.format(count, selection)
+ return "{} {}".format(count, selection)
@contextmanager
def temporary_locale(temp_locale=None, lc_category=locale.LC_ALL):
- '''
+ """
Enable code to run in a context with a temporary locale
Resets the locale back when exiting context.
Use tests.support.TestCaseWithCLocale if you want every unit test in a
class to use the C locale.
- '''
+ """
orig_locale = locale.setlocale(lc_category)
if temp_locale:
locale.setlocale(lc_category, temp_locale)
diff --git a/pelican/writers.py b/pelican/writers.py
--- a/pelican/writers.py
+++ b/pelican/writers.py
@@ -9,14 +9,18 @@
from pelican.paginator import Paginator
from pelican.plugins import signals
-from pelican.utils import (get_relative_path, is_selected_for_writing,
- path_to_url, sanitised_join, set_date_tzinfo)
+from pelican.utils import (
+ get_relative_path,
+ is_selected_for_writing,
+ path_to_url,
+ sanitised_join,
+ set_date_tzinfo,
+)
logger = logging.getLogger(__name__)
class Writer:
-
def __init__(self, output_path, settings=None):
self.output_path = output_path
self.reminder = dict()
@@ -25,24 +29,26 @@ def __init__(self, output_path, settings=None):
self._overridden_files = set()
# See Content._link_replacer for details
- if "RELATIVE_URLS" in self.settings and self.settings['RELATIVE_URLS']:
+ if "RELATIVE_URLS" in self.settings and self.settings["RELATIVE_URLS"]:
self.urljoiner = posix_join
else:
self.urljoiner = lambda base, url: urljoin(
- base if base.endswith('/') else base + '/', str(url))
+ base if base.endswith("/") else base + "/", str(url)
+ )
def _create_new_feed(self, feed_type, feed_title, context):
- feed_class = Rss201rev2Feed if feed_type == 'rss' else Atom1Feed
+ feed_class = Rss201rev2Feed if feed_type == "rss" else Atom1Feed
if feed_title:
- feed_title = context['SITENAME'] + ' - ' + feed_title
+ feed_title = context["SITENAME"] + " - " + feed_title
else:
- feed_title = context['SITENAME']
+ feed_title = context["SITENAME"]
return feed_class(
title=Markup(feed_title).striptags(),
- link=(self.site_url + '/'),
+ link=(self.site_url + "/"),
feed_url=self.feed_url,
- description=context.get('SITESUBTITLE', ''),
- subtitle=context.get('SITESUBTITLE', None))
+ description=context.get("SITESUBTITLE", ""),
+ subtitle=context.get("SITESUBTITLE", None),
+ )
def _add_item_to_the_feed(self, feed, item):
title = Markup(item.title).striptags()
@@ -52,7 +58,7 @@ def _add_item_to_the_feed(self, feed, item):
# RSS feeds use a single tag called 'description' for both the full
# content and the summary
content = None
- if self.settings.get('RSS_FEED_SUMMARY_ONLY'):
+ if self.settings.get("RSS_FEED_SUMMARY_ONLY"):
description = item.summary
else:
description = item.get_content(self.site_url)
@@ -71,9 +77,9 @@ def _add_item_to_the_feed(self, feed, item):
description = None
categories = []
- if hasattr(item, 'category'):
+ if hasattr(item, "category"):
categories.append(item.category)
- if hasattr(item, 'tags'):
+ if hasattr(item, "tags"):
categories.extend(item.tags)
feed.add_item(
@@ -83,14 +89,12 @@ def _add_item_to_the_feed(self, feed, item):
description=description,
content=content,
categories=categories or None,
- author_name=getattr(item, 'author', ''),
- pubdate=set_date_tzinfo(
- item.date, self.settings.get('TIMEZONE', None)
- ),
+ author_name=getattr(item, "author", ""),
+ pubdate=set_date_tzinfo(item.date, self.settings.get("TIMEZONE", None)),
updateddate=set_date_tzinfo(
- item.modified, self.settings.get('TIMEZONE', None)
+ item.modified, self.settings.get("TIMEZONE", None)
)
- if hasattr(item, 'modified')
+ if hasattr(item, "modified")
else None,
)
@@ -102,22 +106,29 @@ def _open_w(self, filename, encoding, override=False):
"""
if filename in self._overridden_files:
if override:
- raise RuntimeError('File %s is set to be overridden twice'
- % filename)
- logger.info('Skipping %s', filename)
+ raise RuntimeError("File %s is set to be overridden twice" % filename)
+ logger.info("Skipping %s", filename)
filename = os.devnull
elif filename in self._written_files:
if override:
- logger.info('Overwriting %s', filename)
+ logger.info("Overwriting %s", filename)
else:
- raise RuntimeError('File %s is to be overwritten' % filename)
+ raise RuntimeError("File %s is to be overwritten" % filename)
if override:
self._overridden_files.add(filename)
self._written_files.add(filename)
- return open(filename, 'w', encoding=encoding)
-
- def write_feed(self, elements, context, path=None, url=None,
- feed_type='atom', override_output=False, feed_title=None):
+ return open(filename, "w", encoding=encoding)
+
+ def write_feed(
+ self,
+ elements,
+ context,
+ path=None,
+ url=None,
+ feed_type="atom",
+ override_output=False,
+ feed_title=None,
+ ):
"""Generate a feed with the list of articles provided
Return the feed. If no path or output_path is specified, just
@@ -137,16 +148,15 @@ def write_feed(self, elements, context, path=None, url=None,
if not is_selected_for_writing(self.settings, path):
return
- self.site_url = context.get(
- 'SITEURL', path_to_url(get_relative_path(path)))
+ self.site_url = context.get("SITEURL", path_to_url(get_relative_path(path)))
- self.feed_domain = context.get('FEED_DOMAIN')
+ self.feed_domain = context.get("FEED_DOMAIN")
self.feed_url = self.urljoiner(self.feed_domain, url or path)
feed = self._create_new_feed(feed_type, feed_title, context)
# FEED_MAX_ITEMS = None means [:None] to get every element
- for element in elements[:self.settings['FEED_MAX_ITEMS']]:
+ for element in elements[: self.settings["FEED_MAX_ITEMS"]]:
self._add_item_to_the_feed(feed, element)
signals.feed_generated.send(context, feed=feed)
@@ -158,17 +168,25 @@ def write_feed(self, elements, context, path=None, url=None,
except Exception:
pass
- with self._open_w(complete_path, 'utf-8', override_output) as fp:
- feed.write(fp, 'utf-8')
- logger.info('Writing %s', complete_path)
+ with self._open_w(complete_path, "utf-8", override_output) as fp:
+ feed.write(fp, "utf-8")
+ logger.info("Writing %s", complete_path)
- signals.feed_written.send(
- complete_path, context=context, feed=feed)
+ signals.feed_written.send(complete_path, context=context, feed=feed)
return feed
- def write_file(self, name, template, context, relative_urls=False,
- paginated=None, template_name=None, override_output=False,
- url=None, **kwargs):
+ def write_file(
+ self,
+ name,
+ template,
+ context,
+ relative_urls=False,
+ paginated=None,
+ template_name=None,
+ override_output=False,
+ url=None,
+ **kwargs,
+ ):
"""Render the template and write the file.
:param name: name of the file to output
@@ -185,10 +203,13 @@ def write_file(self, name, template, context, relative_urls=False,
:param **kwargs: additional variables to pass to the templates
"""
- if name is False or \
- name == "" or \
- not is_selected_for_writing(self.settings,
- os.path.join(self.output_path, name)):
+ if (
+ name is False
+ or name == ""
+ or not is_selected_for_writing(
+ self.settings, os.path.join(self.output_path, name)
+ )
+ ):
return
elif not name:
# other stuff, just return for now
@@ -197,8 +218,8 @@ def write_file(self, name, template, context, relative_urls=False,
def _write_file(template, localcontext, output_path, name, override):
"""Render the template write the file."""
# set localsiteurl for context so that Contents can adjust links
- if localcontext['localsiteurl']:
- context['localsiteurl'] = localcontext['localsiteurl']
+ if localcontext["localsiteurl"]:
+ context["localsiteurl"] = localcontext["localsiteurl"]
output = template.render(localcontext)
path = sanitised_join(output_path, name)
@@ -207,9 +228,9 @@ def _write_file(template, localcontext, output_path, name, override):
except Exception:
pass
- with self._open_w(path, 'utf-8', override=override) as f:
+ with self._open_w(path, "utf-8", override=override) as f:
f.write(output)
- logger.info('Writing %s', path)
+ logger.info("Writing %s", path)
# Send a signal to say we're writing a file with some specific
# local context.
@@ -217,54 +238,66 @@ def _write_file(template, localcontext, output_path, name, override):
def _get_localcontext(context, name, kwargs, relative_urls):
localcontext = context.copy()
- localcontext['localsiteurl'] = localcontext.get(
- 'localsiteurl', None)
+ localcontext["localsiteurl"] = localcontext.get("localsiteurl", None)
if relative_urls:
relative_url = path_to_url(get_relative_path(name))
- localcontext['SITEURL'] = relative_url
- localcontext['localsiteurl'] = relative_url
- localcontext['output_file'] = name
+ localcontext["SITEURL"] = relative_url
+ localcontext["localsiteurl"] = relative_url
+ localcontext["output_file"] = name
localcontext.update(kwargs)
return localcontext
if paginated is None:
- paginated = {key: val for key, val in kwargs.items()
- if key in {'articles', 'dates'}}
+ paginated = {
+ key: val for key, val in kwargs.items() if key in {"articles", "dates"}
+ }
# pagination
- if paginated and template_name in self.settings['PAGINATED_TEMPLATES']:
+ if paginated and template_name in self.settings["PAGINATED_TEMPLATES"]:
# pagination needed
- per_page = self.settings['PAGINATED_TEMPLATES'][template_name] \
- or self.settings['DEFAULT_PAGINATION']
+ per_page = (
+ self.settings["PAGINATED_TEMPLATES"][template_name]
+ or self.settings["DEFAULT_PAGINATION"]
+ )
# init paginators
- paginators = {key: Paginator(name, url, val, self.settings,
- per_page)
- for key, val in paginated.items()}
+ paginators = {
+ key: Paginator(name, url, val, self.settings, per_page)
+ for key, val in paginated.items()
+ }
# generated pages, and write
for page_num in range(list(paginators.values())[0].num_pages):
paginated_kwargs = kwargs.copy()
for key in paginators.keys():
paginator = paginators[key]
- previous_page = paginator.page(page_num) \
- if page_num > 0 else None
+ previous_page = paginator.page(page_num) if page_num > 0 else None
page = paginator.page(page_num + 1)
- next_page = paginator.page(page_num + 2) \
- if page_num + 1 < paginator.num_pages else None
+ next_page = (
+ paginator.page(page_num + 2)
+ if page_num + 1 < paginator.num_pages
+ else None
+ )
paginated_kwargs.update(
- {'%s_paginator' % key: paginator,
- '%s_page' % key: page,
- '%s_previous_page' % key: previous_page,
- '%s_next_page' % key: next_page})
+ {
+ "%s_paginator" % key: paginator,
+ "%s_page" % key: page,
+ "%s_previous_page" % key: previous_page,
+ "%s_next_page" % key: next_page,
+ }
+ )
localcontext = _get_localcontext(
- context, page.save_as, paginated_kwargs, relative_urls)
- _write_file(template, localcontext, self.output_path,
- page.save_as, override_output)
+ context, page.save_as, paginated_kwargs, relative_urls
+ )
+ _write_file(
+ template,
+ localcontext,
+ self.output_path,
+ page.save_as,
+ override_output,
+ )
else:
# no pagination
- localcontext = _get_localcontext(
- context, name, kwargs, relative_urls)
- _write_file(template, localcontext, self.output_path, name,
- override_output)
+ localcontext = _get_localcontext(context, name, kwargs, relative_urls)
+ _write_file(template, localcontext, self.output_path, name, override_output)
diff --git a/samples/pelican.conf.py b/samples/pelican.conf.py
--- a/samples/pelican.conf.py
+++ b/samples/pelican.conf.py
@@ -1,55 +1,59 @@
-AUTHOR = 'Alexis Métaireau'
+AUTHOR = "Alexis Métaireau"
SITENAME = "Alexis' log"
-SITESUBTITLE = 'A personal blog.'
-SITEURL = 'http://blog.notmyidea.org'
+SITESUBTITLE = "A personal blog."
+SITEURL = "http://blog.notmyidea.org"
TIMEZONE = "Europe/Paris"
# can be useful in development, but set to False when you're ready to publish
RELATIVE_URLS = True
-GITHUB_URL = 'http://github.com/ametaireau/'
+GITHUB_URL = "http://github.com/ametaireau/"
DISQUS_SITENAME = "blog-notmyidea"
REVERSE_CATEGORY_ORDER = True
LOCALE = "C"
DEFAULT_PAGINATION = 4
DEFAULT_DATE = (2012, 3, 2, 14, 1, 1)
-FEED_ALL_RSS = 'feeds/all.rss.xml'
-CATEGORY_FEED_RSS = 'feeds/{slug}.rss.xml'
+FEED_ALL_RSS = "feeds/all.rss.xml"
+CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"
-LINKS = (('Biologeek', 'http://biologeek.org'),
- ('Filyb', "http://filyb.info/"),
- ('Libert-fr', "http://www.libert-fr.com"),
- ('N1k0', "http://prendreuncafe.com/blog/"),
- ('Tarek Ziadé', "http://ziade.org/blog"),
- ('Zubin Mithra', "http://zubin71.wordpress.com/"),)
+LINKS = (
+ ("Biologeek", "http://biologeek.org"),
+ ("Filyb", "http://filyb.info/"),
+ ("Libert-fr", "http://www.libert-fr.com"),
+ ("N1k0", "http://prendreuncafe.com/blog/"),
+ ("Tarek Ziadé", "http://ziade.org/blog"),
+ ("Zubin Mithra", "http://zubin71.wordpress.com/"),
+)
-SOCIAL = (('twitter', 'http://twitter.com/ametaireau'),
- ('lastfm', 'http://lastfm.com/user/akounet'),
- ('github', 'http://github.com/ametaireau'),)
+SOCIAL = (
+ ("twitter", "http://twitter.com/ametaireau"),
+ ("lastfm", "http://lastfm.com/user/akounet"),
+ ("github", "http://github.com/ametaireau"),
+)
# global metadata to all the contents
-DEFAULT_METADATA = {'yeah': 'it is'}
+DEFAULT_METADATA = {"yeah": "it is"}
# path-specific metadata
EXTRA_PATH_METADATA = {
- 'extra/robots.txt': {'path': 'robots.txt'},
- }
+ "extra/robots.txt": {"path": "robots.txt"},
+}
# static paths will be copied without parsing their contents
STATIC_PATHS = [
- 'images',
- 'extra/robots.txt',
- ]
+ "images",
+ "extra/robots.txt",
+]
# custom page generated with a jinja2 template
-TEMPLATE_PAGES = {'pages/jinja2_template.html': 'jinja2_template.html'}
+TEMPLATE_PAGES = {"pages/jinja2_template.html": "jinja2_template.html"}
# there is no other HTML content
-READERS = {'html': None}
+READERS = {"html": None}
# code blocks with line numbers
-PYGMENTS_RST_OPTIONS = {'linenos': 'table'}
+PYGMENTS_RST_OPTIONS = {"linenos": "table"}
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
diff --git a/samples/pelican.conf_FR.py b/samples/pelican.conf_FR.py
--- a/samples/pelican.conf_FR.py
+++ b/samples/pelican.conf_FR.py
@@ -1,56 +1,60 @@
-AUTHOR = 'Alexis Métaireau'
+AUTHOR = "Alexis Métaireau"
SITENAME = "Alexis' log"
-SITEURL = 'http://blog.notmyidea.org'
+SITEURL = "http://blog.notmyidea.org"
TIMEZONE = "Europe/Paris"
# can be useful in development, but set to False when you're ready to publish
RELATIVE_URLS = True
-GITHUB_URL = 'http://github.com/ametaireau/'
+GITHUB_URL = "http://github.com/ametaireau/"
DISQUS_SITENAME = "blog-notmyidea"
PDF_GENERATOR = False
REVERSE_CATEGORY_ORDER = True
LOCALE = "fr_FR.UTF-8"
DEFAULT_PAGINATION = 4
DEFAULT_DATE = (2012, 3, 2, 14, 1, 1)
-DEFAULT_DATE_FORMAT = '%d %B %Y'
+DEFAULT_DATE_FORMAT = "%d %B %Y"
-ARTICLE_URL = 'posts/{date:%Y}/{date:%B}/{date:%d}/{slug}/'
-ARTICLE_SAVE_AS = ARTICLE_URL + 'index.html'
+ARTICLE_URL = "posts/{date:%Y}/{date:%B}/{date:%d}/{slug}/"
+ARTICLE_SAVE_AS = ARTICLE_URL + "index.html"
-FEED_ALL_RSS = 'feeds/all.rss.xml'
-CATEGORY_FEED_RSS = 'feeds/{slug}.rss.xml'
+FEED_ALL_RSS = "feeds/all.rss.xml"
+CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"
-LINKS = (('Biologeek', 'http://biologeek.org'),
- ('Filyb', "http://filyb.info/"),
- ('Libert-fr', "http://www.libert-fr.com"),
- ('N1k0', "http://prendreuncafe.com/blog/"),
- ('Tarek Ziadé', "http://ziade.org/blog"),
- ('Zubin Mithra', "http://zubin71.wordpress.com/"),)
+LINKS = (
+ ("Biologeek", "http://biologeek.org"),
+ ("Filyb", "http://filyb.info/"),
+ ("Libert-fr", "http://www.libert-fr.com"),
+ ("N1k0", "http://prendreuncafe.com/blog/"),
+ ("Tarek Ziadé", "http://ziade.org/blog"),
+ ("Zubin Mithra", "http://zubin71.wordpress.com/"),
+)
-SOCIAL = (('twitter', 'http://twitter.com/ametaireau'),
- ('lastfm', 'http://lastfm.com/user/akounet'),
- ('github', 'http://github.com/ametaireau'),)
+SOCIAL = (
+ ("twitter", "http://twitter.com/ametaireau"),
+ ("lastfm", "http://lastfm.com/user/akounet"),
+ ("github", "http://github.com/ametaireau"),
+)
# global metadata to all the contents
-DEFAULT_METADATA = {'yeah': 'it is'}
+DEFAULT_METADATA = {"yeah": "it is"}
# path-specific metadata
EXTRA_PATH_METADATA = {
- 'extra/robots.txt': {'path': 'robots.txt'},
- }
+ "extra/robots.txt": {"path": "robots.txt"},
+}
# static paths will be copied without parsing their contents
STATIC_PATHS = [
- 'pictures',
- 'extra/robots.txt',
- ]
+ "pictures",
+ "extra/robots.txt",
+]
# custom page generated with a jinja2 template
-TEMPLATE_PAGES = {'pages/jinja2_template.html': 'jinja2_template.html'}
+TEMPLATE_PAGES = {"pages/jinja2_template.html": "jinja2_template.html"}
# code blocks with line numbers
-PYGMENTS_RST_OPTIONS = {'linenos': 'table'}
+PYGMENTS_RST_OPTIONS = {"linenos": "table"}
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
diff --git a/tasks.py b/tasks.py
--- a/tasks.py
+++ b/tasks.py
@@ -52,24 +52,16 @@ def coverage(c):
@task
-def black(c, check=False, diff=False):
- """Run Black auto-formatter, optionally with --check or --diff"""
+def format(c, check=False, diff=False):
+ """Run Ruff's auto-formatter, optionally with --check or --diff"""
check_flag, diff_flag = "", ""
if check:
check_flag = "--check"
if diff:
diff_flag = "--diff"
- c.run(f"{VENV_BIN}/black {check_flag} {diff_flag} {PKG_PATH} tasks.py", pty=PTY)
-
-
-@task
-def isort(c, check=False, diff=False):
- check_flag, diff_flag = "", ""
- if check:
- check_flag = "-c"
- if diff:
- diff_flag = "--diff"
- c.run(f"{VENV_BIN}/isort {check_flag} {diff_flag} .", pty=PTY)
+ c.run(
+ f"{VENV_BIN}/ruff format {check_flag} {diff_flag} {PKG_PATH} tasks.py", pty=PTY
+ )
@task
@@ -87,6 +79,7 @@ def ruff(c, fix=False, diff=False):
def lint(c, fix=False, diff=False):
"""Check code style via linting tools."""
ruff(c, fix=fix, diff=diff)
+ format(c, check=not fix, diff=diff)
@task
diff --git a/pelican/tests/TestPages/draft_page_markdown.md b/pelican/tests/TestPages/draft_page_markdown.md
--- a/pelican/tests/TestPages/draft_page_markdown.md
+++ b/pelican/tests/TestPages/draft_page_markdown.md
@@ -9,4 +9,4 @@ Used for pelican test
The quick brown fox .
-This page is a draft
\ No newline at end of file
+This page is a draft
diff --git a/pelican/tests/content/2012-11-30_md_w_filename_meta#foo-bar.md b/pelican/tests/content/2012-11-30_md_w_filename_meta#foo-bar.md
--- a/pelican/tests/content/2012-11-30_md_w_filename_meta#foo-bar.md
+++ b/pelican/tests/content/2012-11-30_md_w_filename_meta#foo-bar.md
@@ -3,4 +3,3 @@ author: Alexis Métaireau
Markdown with filename metadata
===============================
-
diff --git a/pelican/tests/content/article_with_markdown_markup_extensions.md b/pelican/tests/content/article_with_markdown_markup_extensions.md
--- a/pelican/tests/content/article_with_markdown_markup_extensions.md
+++ b/pelican/tests/content/article_with_markdown_markup_extensions.md
@@ -5,4 +5,3 @@ Title: Test Markdown extensions
## Level1
### Level2
-
diff --git a/pelican/tests/content/article_with_uppercase_metadata.rst b/pelican/tests/content/article_with_uppercase_metadata.rst
--- a/pelican/tests/content/article_with_uppercase_metadata.rst
+++ b/pelican/tests/content/article_with_uppercase_metadata.rst
@@ -3,4 +3,3 @@ This is a super article !
#########################
:Category: Yeah
-
diff --git a/pelican/tests/content/article_without_category.rst b/pelican/tests/content/article_without_category.rst
--- a/pelican/tests/content/article_without_category.rst
+++ b/pelican/tests/content/article_without_category.rst
@@ -3,4 +3,3 @@ This is an article without category !
#####################################
This article should be in the DEFAULT_CATEGORY.
-
diff --git a/pelican/tests/content/bloggerexport.xml b/pelican/tests/content/bloggerexport.xml
--- a/pelican/tests/content/bloggerexport.xml
+++ b/pelican/tests/content/bloggerexport.xml
@@ -1064,4 +1064,4 @@
<gd:extendedProperty name="blogger.itemClass" value="pid-944253050"/>
<gd:extendedProperty name="blogger.displayTime" value="29 november 2010 om 12:35"/>
</entry>
-</feed>
\ No newline at end of file
+</feed>
diff --git a/pelican/tests/content/empty_with_bom.md b/pelican/tests/content/empty_with_bom.md
--- a/pelican/tests/content/empty_with_bom.md
+++ b/pelican/tests/content/empty_with_bom.md
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/pelican/tests/content/wordpress_content_encoded b/pelican/tests/content/wordpress_content_encoded
--- a/pelican/tests/content/wordpress_content_encoded
+++ b/pelican/tests/content/wordpress_content_encoded
@@ -52,4 +52,3 @@ quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-
diff --git a/pelican/tests/content/wordpressexport.xml b/pelican/tests/content/wordpressexport.xml
--- a/pelican/tests/content/wordpressexport.xml
+++ b/pelican/tests/content/wordpressexport.xml
@@ -838,7 +838,7 @@ proident, sunt in culpa qui officia deserunt mollit anim id est laborum.]]></con
<wp:meta_key>_edit_last</wp:meta_key>
<wp:meta_value><![CDATA[3]]></wp:meta_value>
</wp:postmeta>
- </item>
+ </item>
<item>
<title>A 2nd custom post type also in category 5</title>
<link>http://thisisa.test/?p=177</link>
diff --git a/pelican/tests/default_conf.py b/pelican/tests/default_conf.py
--- a/pelican/tests/default_conf.py
+++ b/pelican/tests/default_conf.py
@@ -1,43 +1,47 @@
-AUTHOR = 'Alexis Métaireau'
+AUTHOR = "Alexis Métaireau"
SITENAME = "Alexis' log"
-SITEURL = 'http://blog.notmyidea.org'
-TIMEZONE = 'UTC'
+SITEURL = "http://blog.notmyidea.org"
+TIMEZONE = "UTC"
-GITHUB_URL = 'http://github.com/ametaireau/'
+GITHUB_URL = "http://github.com/ametaireau/"
DISQUS_SITENAME = "blog-notmyidea"
PDF_GENERATOR = False
REVERSE_CATEGORY_ORDER = True
DEFAULT_PAGINATION = 2
-FEED_RSS = 'feeds/all.rss.xml'
-CATEGORY_FEED_RSS = 'feeds/{slug}.rss.xml'
+FEED_RSS = "feeds/all.rss.xml"
+CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"
-LINKS = (('Biologeek', 'http://biologeek.org'),
- ('Filyb', "http://filyb.info/"),
- ('Libert-fr', "http://www.libert-fr.com"),
- ('N1k0', "http://prendreuncafe.com/blog/"),
- ('Tarek Ziadé', "http://ziade.org/blog"),
- ('Zubin Mithra', "http://zubin71.wordpress.com/"),)
+LINKS = (
+ ("Biologeek", "http://biologeek.org"),
+ ("Filyb", "http://filyb.info/"),
+ ("Libert-fr", "http://www.libert-fr.com"),
+ ("N1k0", "http://prendreuncafe.com/blog/"),
+ ("Tarek Ziadé", "http://ziade.org/blog"),
+ ("Zubin Mithra", "http://zubin71.wordpress.com/"),
+)
-SOCIAL = (('twitter', 'http://twitter.com/ametaireau'),
- ('lastfm', 'http://lastfm.com/user/akounet'),
- ('github', 'http://github.com/ametaireau'),)
+SOCIAL = (
+ ("twitter", "http://twitter.com/ametaireau"),
+ ("lastfm", "http://lastfm.com/user/akounet"),
+ ("github", "http://github.com/ametaireau"),
+)
# global metadata to all the contents
-DEFAULT_METADATA = {'yeah': 'it is'}
+DEFAULT_METADATA = {"yeah": "it is"}
# path-specific metadata
EXTRA_PATH_METADATA = {
- 'extra/robots.txt': {'path': 'robots.txt'},
+ "extra/robots.txt": {"path": "robots.txt"},
}
# static paths will be copied without parsing their contents
STATIC_PATHS = [
- 'pictures',
- 'extra/robots.txt',
+ "pictures",
+ "extra/robots.txt",
]
-FORMATTED_FIELDS = ['summary', 'custom_formatted_field']
+FORMATTED_FIELDS = ["summary", "custom_formatted_field"]
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
diff --git a/pelican/tests/dummy_plugins/namespace_plugin/pelican/plugins/ns_plugin/__init__.py b/pelican/tests/dummy_plugins/namespace_plugin/pelican/plugins/ns_plugin/__init__.py
--- a/pelican/tests/dummy_plugins/namespace_plugin/pelican/plugins/ns_plugin/__init__.py
+++ b/pelican/tests/dummy_plugins/namespace_plugin/pelican/plugins/ns_plugin/__init__.py
@@ -1,4 +1,4 @@
-NAME = 'namespace plugin'
+NAME = "namespace plugin"
def register():
diff --git a/pelican/tests/support.py b/pelican/tests/support.py
--- a/pelican/tests/support.py
+++ b/pelican/tests/support.py
@@ -16,7 +16,10 @@
from pelican.readers import default_metadata
from pelican.settings import DEFAULT_CONFIG
-__all__ = ['get_article', 'unittest', ]
+__all__ = [
+ "get_article",
+ "unittest",
+]
@contextmanager
@@ -51,7 +54,7 @@ def isplit(s, sep=None):
True
"""
- sep, hardsep = r'\s+' if sep is None else re.escape(sep), sep is not None
+ sep, hardsep = r"\s+" if sep is None else re.escape(sep), sep is not None
exp, pos, length = re.compile(sep), 0, len(s)
while True:
m = exp.search(s, pos)
@@ -89,10 +92,8 @@ def mute(returns_output=False):
"""
def decorator(func):
-
@wraps(func)
def wrapper(*args, **kwargs):
-
saved_stdout = sys.stdout
sys.stdout = StringIO()
@@ -112,7 +113,7 @@ def wrapper(*args, **kwargs):
def get_article(title, content, **extra_metadata):
metadata = default_metadata(settings=DEFAULT_CONFIG)
- metadata['title'] = title
+ metadata["title"] = title
if extra_metadata:
metadata.update(extra_metadata)
return Article(content, metadata=metadata)
@@ -125,14 +126,14 @@ def skipIfNoExecutable(executable):
and skips the tests if not found (if subprocess raises a `OSError`).
"""
- with open(os.devnull, 'w') as fnull:
+ with open(os.devnull, "w") as fnull:
try:
res = subprocess.call(executable, stdout=fnull, stderr=fnull)
except OSError:
res = None
if res is None:
- return unittest.skip('{} executable not found'.format(executable))
+ return unittest.skip("{} executable not found".format(executable))
return lambda func: func
@@ -164,10 +165,7 @@ def can_symlink():
res = True
try:
with temporary_folder() as f:
- os.symlink(
- f,
- os.path.join(f, 'symlink')
- )
+ os.symlink(f, os.path.join(f, "symlink"))
except OSError:
res = False
return res
@@ -186,9 +184,9 @@ def get_settings(**kwargs):
def get_context(settings=None, **kwargs):
context = settings.copy() if settings else {}
- context['generated_content'] = {}
- context['static_links'] = set()
- context['static_content'] = {}
+ context["generated_content"] = {}
+ context["static_links"] = set()
+ context["static_content"] = {}
context.update(kwargs)
return context
@@ -200,22 +198,24 @@ def __init__(self, capacity=1000):
super().__init__(capacity)
def count_logs(self, msg=None, level=None):
- return len([
- rec
- for rec
- in self.buffer
- if (msg is None or re.match(msg, rec.getMessage())) and
- (level is None or rec.levelno == level)
- ])
+ return len(
+ [
+ rec
+ for rec in self.buffer
+ if (msg is None or re.match(msg, rec.getMessage()))
+ and (level is None or rec.levelno == level)
+ ]
+ )
def count_formatted_logs(self, msg=None, level=None):
- return len([
- rec
- for rec
- in self.buffer
- if (msg is None or re.search(msg, self.format(rec))) and
- (level is None or rec.levelno == level)
- ])
+ return len(
+ [
+ rec
+ for rec in self.buffer
+ if (msg is None or re.search(msg, self.format(rec)))
+ and (level is None or rec.levelno == level)
+ ]
+ )
def diff_subproc(first, second):
@@ -228,8 +228,16 @@ def diff_subproc(first, second):
>>> didCheckFail = proc.returnCode != 0
"""
return subprocess.Popen(
- ['git', '--no-pager', 'diff', '--no-ext-diff', '--exit-code',
- '-w', first, second],
+ [
+ "git",
+ "--no-pager",
+ "diff",
+ "--no-ext-diff",
+ "--exit-code",
+ "-w",
+ first,
+ second,
+ ],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
@@ -251,9 +259,12 @@ def tearDown(self):
def assertLogCountEqual(self, count=None, msg=None, **kwargs):
actual = self._logcount_handler.count_logs(msg=msg, **kwargs)
self.assertEqual(
- actual, count,
- msg='expected {} occurrences of {!r}, but found {}'.format(
- count, msg, actual))
+ actual,
+ count,
+ msg="expected {} occurrences of {!r}, but found {}".format(
+ count, msg, actual
+ ),
+ )
class TestCaseWithCLocale(unittest.TestCase):
@@ -261,9 +272,10 @@ class TestCaseWithCLocale(unittest.TestCase):
Use utils.temporary_locale if you want a context manager ("with" statement).
"""
+
def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
diff --git a/pelican/tests/test_cache.py b/pelican/tests/test_cache.py
--- a/pelican/tests/test_cache.py
+++ b/pelican/tests/test_cache.py
@@ -8,31 +8,30 @@
CUR_DIR = os.path.dirname(__file__)
-CONTENT_DIR = os.path.join(CUR_DIR, 'content')
+CONTENT_DIR = os.path.join(CUR_DIR, "content")
class TestCache(unittest.TestCase):
-
def setUp(self):
- self.temp_cache = mkdtemp(prefix='pelican_cache.')
+ self.temp_cache = mkdtemp(prefix="pelican_cache.")
def tearDown(self):
rmtree(self.temp_cache)
def _get_cache_enabled_settings(self):
settings = get_settings()
- settings['CACHE_CONTENT'] = True
- settings['LOAD_CONTENT_CACHE'] = True
- settings['CACHE_PATH'] = self.temp_cache
+ settings["CACHE_CONTENT"] = True
+ settings["LOAD_CONTENT_CACHE"] = True
+ settings["CACHE_PATH"] = self.temp_cache
return settings
def test_generator_caching(self):
"""Test that cached and uncached content is same in generator level"""
settings = self._get_cache_enabled_settings()
- settings['CONTENT_CACHING_LAYER'] = 'generator'
- settings['PAGE_PATHS'] = ['TestPages']
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['READERS'] = {'asc': None}
+ settings["CONTENT_CACHING_LAYER"] = "generator"
+ settings["PAGE_PATHS"] = ["TestPages"]
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
def sorted_titles(items):
@@ -40,15 +39,23 @@ def sorted_titles(items):
# Articles
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
uncached_articles = sorted_titles(generator.articles)
uncached_drafts = sorted_titles(generator.drafts)
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
cached_articles = sorted_titles(generator.articles)
cached_drafts = sorted_titles(generator.drafts)
@@ -58,16 +65,24 @@ def sorted_titles(items):
# Pages
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
uncached_pages = sorted_titles(generator.pages)
uncached_hidden_pages = sorted_titles(generator.hidden_pages)
uncached_draft_pages = sorted_titles(generator.draft_pages)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
cached_pages = sorted_titles(generator.pages)
cached_hidden_pages = sorted_titles(generator.hidden_pages)
@@ -80,10 +95,10 @@ def sorted_titles(items):
def test_reader_caching(self):
"""Test that cached and uncached content is same in reader level"""
settings = self._get_cache_enabled_settings()
- settings['CONTENT_CACHING_LAYER'] = 'reader'
- settings['PAGE_PATHS'] = ['TestPages']
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['READERS'] = {'asc': None}
+ settings["CONTENT_CACHING_LAYER"] = "reader"
+ settings["PAGE_PATHS"] = ["TestPages"]
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
def sorted_titles(items):
@@ -91,15 +106,23 @@ def sorted_titles(items):
# Articles
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
uncached_articles = sorted_titles(generator.articles)
uncached_drafts = sorted_titles(generator.drafts)
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
cached_articles = sorted_titles(generator.articles)
cached_drafts = sorted_titles(generator.drafts)
@@ -109,15 +132,23 @@ def sorted_titles(items):
# Pages
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
uncached_pages = sorted_titles(generator.pages)
uncached_hidden_pages = sorted_titles(generator.hidden_pages)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
cached_pages = sorted_titles(generator.pages)
cached_hidden_pages = sorted_titles(generator.hidden_pages)
@@ -128,20 +159,28 @@ def sorted_titles(items):
def test_article_object_caching(self):
"""Test Article objects caching at the generator level"""
settings = self._get_cache_enabled_settings()
- settings['CONTENT_CACHING_LAYER'] = 'generator'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['READERS'] = {'asc': None}
+ settings["CONTENT_CACHING_LAYER"] = "generator"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- self.assertTrue(hasattr(generator, '_cache'))
+ self.assertTrue(hasattr(generator, "_cache"))
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
"""
@@ -158,18 +197,26 @@ def test_article_object_caching(self):
def test_article_reader_content_caching(self):
"""Test raw article content caching at the reader level"""
settings = self._get_cache_enabled_settings()
- settings['READERS'] = {'asc': None}
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- self.assertTrue(hasattr(generator.readers, '_cache'))
+ self.assertTrue(hasattr(generator.readers, "_cache"))
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
readers = generator.readers.readers
for reader in readers.values():
reader.read = MagicMock()
@@ -182,44 +229,58 @@ def test_article_ignore_cache(self):
used in --ignore-cache or autoreload mode"""
settings = self._get_cache_enabled_settings()
- settings['READERS'] = {'asc': None}
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
- self.assertTrue(hasattr(generator, '_cache_open'))
+ self.assertTrue(hasattr(generator, "_cache_open"))
orig_call_count = generator.readers.read_file.call_count
- settings['LOAD_CONTENT_CACHE'] = False
+ settings["LOAD_CONTENT_CACHE"] = False
generator = ArticlesGenerator(
- context=context.copy(), settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
- self.assertEqual(
- generator.readers.read_file.call_count,
- orig_call_count)
+ self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
def test_page_object_caching(self):
"""Test Page objects caching at the generator level"""
settings = self._get_cache_enabled_settings()
- settings['CONTENT_CACHING_LAYER'] = 'generator'
- settings['PAGE_PATHS'] = ['TestPages']
- settings['READERS'] = {'asc': None}
+ settings["CONTENT_CACHING_LAYER"] = "generator"
+ settings["PAGE_PATHS"] = ["TestPages"]
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- self.assertTrue(hasattr(generator, '_cache'))
+ self.assertTrue(hasattr(generator, "_cache"))
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
"""
@@ -231,19 +292,27 @@ def test_page_object_caching(self):
def test_page_reader_content_caching(self):
"""Test raw page content caching at the reader level"""
settings = self._get_cache_enabled_settings()
- settings['PAGE_PATHS'] = ['TestPages']
- settings['READERS'] = {'asc': None}
+ settings["PAGE_PATHS"] = ["TestPages"]
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- self.assertTrue(hasattr(generator.readers, '_cache'))
+ self.assertTrue(hasattr(generator.readers, "_cache"))
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
readers = generator.readers.readers
for reader in readers.values():
reader.read = MagicMock()
@@ -256,24 +325,30 @@ def test_page_ignore_cache(self):
used in --ignore_cache or autoreload mode"""
settings = self._get_cache_enabled_settings()
- settings['PAGE_PATHS'] = ['TestPages']
- settings['READERS'] = {'asc': None}
+ settings["PAGE_PATHS"] = ["TestPages"]
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
- self.assertTrue(hasattr(generator, '_cache_open'))
+ self.assertTrue(hasattr(generator, "_cache_open"))
orig_call_count = generator.readers.read_file.call_count
- settings['LOAD_CONTENT_CACHE'] = False
+ settings["LOAD_CONTENT_CACHE"] = False
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.readers.read_file = MagicMock()
generator.generate_context()
- self.assertEqual(
- generator.readers.read_file.call_count,
- orig_call_count)
+ self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
diff --git a/pelican/tests/test_cli.py b/pelican/tests/test_cli.py
--- a/pelican/tests/test_cli.py
+++ b/pelican/tests/test_cli.py
@@ -5,68 +5,77 @@
class TestParseOverrides(unittest.TestCase):
def test_flags(self):
- for flag in ['-e', '--extra-settings']:
- args = parse_arguments([flag, 'k=1'])
- self.assertDictEqual(args.overrides, {'k': 1})
+ for flag in ["-e", "--extra-settings"]:
+ args = parse_arguments([flag, "k=1"])
+ self.assertDictEqual(args.overrides, {"k": 1})
def test_parse_multiple_items(self):
- args = parse_arguments('-e k1=1 k2=2'.split())
- self.assertDictEqual(args.overrides, {'k1': 1, 'k2': 2})
+ args = parse_arguments("-e k1=1 k2=2".split())
+ self.assertDictEqual(args.overrides, {"k1": 1, "k2": 2})
def test_parse_valid_json(self):
json_values_python_values_map = {
- '""': '',
- 'null': None,
- '"string"': 'string',
- '["foo", 12, "4", {}]': ['foo', 12, '4', {}]
+ '""': "",
+ "null": None,
+ '"string"': "string",
+ '["foo", 12, "4", {}]': ["foo", 12, "4", {}],
}
for k, v in json_values_python_values_map.items():
- args = parse_arguments(['-e', 'k=' + k])
- self.assertDictEqual(args.overrides, {'k': v})
+ args = parse_arguments(["-e", "k=" + k])
+ self.assertDictEqual(args.overrides, {"k": v})
def test_parse_invalid_syntax(self):
- invalid_items = ['k= 1', 'k =1', 'k', 'k v']
+ invalid_items = ["k= 1", "k =1", "k", "k v"]
for item in invalid_items:
with self.assertRaises(ValueError):
- parse_arguments(f'-e {item}'.split())
+ parse_arguments(f"-e {item}".split())
def test_parse_invalid_json(self):
invalid_json = {
- '', 'False', 'True', 'None', 'some other string',
- '{"foo": bar}', '[foo]'
+ "",
+ "False",
+ "True",
+ "None",
+ "some other string",
+ '{"foo": bar}',
+ "[foo]",
}
for v in invalid_json:
with self.assertRaises(ValueError):
- parse_arguments(['-e ', 'k=' + v])
+ parse_arguments(["-e ", "k=" + v])
class TestGetConfigFromArgs(unittest.TestCase):
def test_overrides_known_keys(self):
- args = parse_arguments([
- '-e',
- 'DELETE_OUTPUT_DIRECTORY=false',
- 'OUTPUT_RETENTION=["1.txt"]',
- 'SITENAME="Title"'
- ])
+ args = parse_arguments(
+ [
+ "-e",
+ "DELETE_OUTPUT_DIRECTORY=false",
+ 'OUTPUT_RETENTION=["1.txt"]',
+ 'SITENAME="Title"',
+ ]
+ )
config = get_config(args)
config_must_contain = {
- 'DELETE_OUTPUT_DIRECTORY': False,
- 'OUTPUT_RETENTION': ['1.txt'],
- 'SITENAME': 'Title'
+ "DELETE_OUTPUT_DIRECTORY": False,
+ "OUTPUT_RETENTION": ["1.txt"],
+ "SITENAME": "Title",
}
self.assertDictEqual(config, {**config, **config_must_contain})
def test_overrides_non_default_type(self):
- args = parse_arguments([
- '-e',
- 'DISPLAY_PAGES_ON_MENU=123',
- 'PAGE_TRANSLATION_ID=null',
- 'TRANSLATION_FEED_RSS_URL="someurl"'
- ])
+ args = parse_arguments(
+ [
+ "-e",
+ "DISPLAY_PAGES_ON_MENU=123",
+ "PAGE_TRANSLATION_ID=null",
+ 'TRANSLATION_FEED_RSS_URL="someurl"',
+ ]
+ )
config = get_config(args)
config_must_contain = {
- 'DISPLAY_PAGES_ON_MENU': 123,
- 'PAGE_TRANSLATION_ID': None,
- 'TRANSLATION_FEED_RSS_URL': 'someurl'
+ "DISPLAY_PAGES_ON_MENU": 123,
+ "PAGE_TRANSLATION_ID": None,
+ "TRANSLATION_FEED_RSS_URL": "someurl",
}
self.assertDictEqual(config, {**config, **config_must_contain})
diff --git a/pelican/tests/test_contents.py b/pelican/tests/test_contents.py
--- a/pelican/tests/test_contents.py
+++ b/pelican/tests/test_contents.py
@@ -10,9 +10,8 @@
from pelican.contents import Article, Author, Category, Page, Static
from pelican.plugins.signals import content_object_init
from pelican.settings import DEFAULT_CONFIG
-from pelican.tests.support import (LoggedTestCase, get_context, get_settings,
- unittest)
-from pelican.utils import (path_to_url, posixize_path, truncate_html_words)
+from pelican.tests.support import LoggedTestCase, get_context, get_settings, unittest
+from pelican.utils import path_to_url, posixize_path, truncate_html_words
# generate one paragraph, enclosed with <p>
@@ -21,25 +20,24 @@
class TestBase(LoggedTestCase):
-
def setUp(self):
super().setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
self.page_kwargs = {
- 'content': TEST_CONTENT,
- 'context': {
- 'localsiteurl': '',
- 'generated_content': {},
- 'static_content': {},
- 'static_links': set()
+ "content": TEST_CONTENT,
+ "context": {
+ "localsiteurl": "",
+ "generated_content": {},
+ "static_content": {},
+ "static_links": set(),
},
- 'metadata': {
- 'summary': TEST_SUMMARY,
- 'title': 'foo bar',
- 'author': Author('Blogger', DEFAULT_CONFIG),
+ "metadata": {
+ "summary": TEST_SUMMARY,
+ "title": "foo bar",
+ "author": Author("Blogger", DEFAULT_CONFIG),
},
- 'source_path': '/path/to/file/foo.ext'
+ "source_path": "/path/to/file/foo.ext",
}
self._disable_limit_filter()
@@ -49,10 +47,12 @@ def tearDown(self):
def _disable_limit_filter(self):
from pelican.contents import logger
+
logger.disable_filter()
def _enable_limit_filter(self):
from pelican.contents import logger
+
logger.enable_filter()
def _copy_page_kwargs(self):
@@ -72,9 +72,12 @@ class TestPage(TestBase):
def test_use_args(self):
# Creating a page with arguments passed to the constructor should use
# them to initialise object's attributes.
- metadata = {'foo': 'bar', 'foobar': 'baz', 'title': 'foobar', }
- page = Page(TEST_CONTENT, metadata=metadata,
- context={'localsiteurl': ''})
+ metadata = {
+ "foo": "bar",
+ "foobar": "baz",
+ "title": "foobar",
+ }
+ page = Page(TEST_CONTENT, metadata=metadata, context={"localsiteurl": ""})
for key, value in metadata.items():
self.assertTrue(hasattr(page, key))
self.assertEqual(value, getattr(page, key))
@@ -82,13 +85,14 @@ def test_use_args(self):
def test_mandatory_properties(self):
# If the title is not set, must throw an exception.
- page = Page('content')
+ page = Page("content")
self.assertFalse(page._has_valid_mandatory_properties())
self.assertLogCountEqual(
- count=1,
- msg="Skipping .*: could not find information about 'title'",
- level=logging.ERROR)
- page = Page('content', metadata={'title': 'foobar'})
+ count=1,
+ msg="Skipping .*: could not find information about 'title'",
+ level=logging.ERROR,
+ )
+ page = Page("content", metadata={"title": "foobar"})
self.assertTrue(page._has_valid_mandatory_properties())
def test_summary_from_metadata(self):
@@ -101,31 +105,32 @@ def test_summary_max_length(self):
# generated summary should not exceed the given length.
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
- page_kwargs['settings'] = settings
- del page_kwargs['metadata']['summary']
- settings['SUMMARY_MAX_LENGTH'] = None
+ page_kwargs["settings"] = settings
+ del page_kwargs["metadata"]["summary"]
+ settings["SUMMARY_MAX_LENGTH"] = None
page = Page(**page_kwargs)
self.assertEqual(page.summary, TEST_CONTENT)
- settings['SUMMARY_MAX_LENGTH'] = 10
+ settings["SUMMARY_MAX_LENGTH"] = 10
page = Page(**page_kwargs)
self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10))
- settings['SUMMARY_MAX_LENGTH'] = 0
+ settings["SUMMARY_MAX_LENGTH"] = 0
page = Page(**page_kwargs)
- self.assertEqual(page.summary, '')
+ self.assertEqual(page.summary, "")
def test_summary_end_suffix(self):
# If a :SUMMARY_END_SUFFIX: is set, and there is no other summary,
# generated summary should contain the specified marker at the end.
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
- page_kwargs['settings'] = settings
- del page_kwargs['metadata']['summary']
- settings['SUMMARY_END_SUFFIX'] = 'test_marker'
- settings['SUMMARY_MAX_LENGTH'] = 10
+ page_kwargs["settings"] = settings
+ del page_kwargs["metadata"]["summary"]
+ settings["SUMMARY_END_SUFFIX"] = "test_marker"
+ settings["SUMMARY_MAX_LENGTH"] = 10
page = Page(**page_kwargs)
- self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10,
- 'test_marker'))
- self.assertIn('test_marker', page.summary)
+ self.assertEqual(
+ page.summary, truncate_html_words(TEST_CONTENT, 10, "test_marker")
+ )
+ self.assertIn("test_marker", page.summary)
def test_summary_get_summary_warning(self):
"""calling ._get_summary() should issue a warning"""
@@ -134,57 +139,61 @@ def test_summary_get_summary_warning(self):
self.assertEqual(page.summary, TEST_SUMMARY)
self.assertEqual(page._get_summary(), TEST_SUMMARY)
self.assertLogCountEqual(
- count=1,
- msg=r"_get_summary\(\) has been deprecated since 3\.6\.4\. "
- "Use the summary decorator instead",
- level=logging.WARNING)
+ count=1,
+ msg=r"_get_summary\(\) has been deprecated since 3\.6\.4\. "
+ "Use the summary decorator instead",
+ level=logging.WARNING,
+ )
def test_slug(self):
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
- page_kwargs['settings'] = settings
- settings['SLUGIFY_SOURCE'] = "title"
+ page_kwargs["settings"] = settings
+ settings["SLUGIFY_SOURCE"] = "title"
page = Page(**page_kwargs)
- self.assertEqual(page.slug, 'foo-bar')
- settings['SLUGIFY_SOURCE'] = "basename"
+ self.assertEqual(page.slug, "foo-bar")
+ settings["SLUGIFY_SOURCE"] = "basename"
page = Page(**page_kwargs)
- self.assertEqual(page.slug, 'foo')
+ self.assertEqual(page.slug, "foo")
# test slug from title with unicode and case
inputs = (
# (title, expected, preserve_case, use_unicode)
- ('指導書', 'zhi-dao-shu', False, False),
- ('指導書', 'Zhi-Dao-Shu', True, False),
- ('指導書', '指導書', False, True),
- ('指導書', '指導書', True, True),
- ('Çığ', 'cig', False, False),
- ('Çığ', 'Cig', True, False),
- ('Çığ', 'çığ', False, True),
- ('Çığ', 'Çığ', True, True),
+ ("指導書", "zhi-dao-shu", False, False),
+ ("指導書", "Zhi-Dao-Shu", True, False),
+ ("指導書", "指導書", False, True),
+ ("指導書", "指導書", True, True),
+ ("Çığ", "cig", False, False),
+ ("Çığ", "Cig", True, False),
+ ("Çığ", "çığ", False, True),
+ ("Çığ", "Çığ", True, True),
)
settings = get_settings()
page_kwargs = self._copy_page_kwargs()
- page_kwargs['settings'] = settings
+ page_kwargs["settings"] = settings
for title, expected, preserve_case, use_unicode in inputs:
- settings['SLUGIFY_PRESERVE_CASE'] = preserve_case
- settings['SLUGIFY_USE_UNICODE'] = use_unicode
- page_kwargs['metadata']['title'] = title
+ settings["SLUGIFY_PRESERVE_CASE"] = preserve_case
+ settings["SLUGIFY_USE_UNICODE"] = use_unicode
+ page_kwargs["metadata"]["title"] = title
page = Page(**page_kwargs)
- self.assertEqual(page.slug, expected,
- (title, preserve_case, use_unicode))
+ self.assertEqual(page.slug, expected, (title, preserve_case, use_unicode))
def test_defaultlang(self):
# If no lang is given, default to the default one.
page = Page(**self.page_kwargs)
- self.assertEqual(page.lang, DEFAULT_CONFIG['DEFAULT_LANG'])
+ self.assertEqual(page.lang, DEFAULT_CONFIG["DEFAULT_LANG"])
# it is possible to specify the lang in the metadata infos
- self.page_kwargs['metadata'].update({'lang': 'fr', })
+ self.page_kwargs["metadata"].update(
+ {
+ "lang": "fr",
+ }
+ )
page = Page(**self.page_kwargs)
- self.assertEqual(page.lang, 'fr')
+ self.assertEqual(page.lang, "fr")
def test_save_as(self):
# If a lang is not the default lang, save_as should be set
@@ -195,7 +204,11 @@ def test_save_as(self):
self.assertEqual(page.save_as, "pages/foo-bar.html")
# if a language is defined, save_as should include it accordingly
- self.page_kwargs['metadata'].update({'lang': 'fr', })
+ self.page_kwargs["metadata"].update(
+ {
+ "lang": "fr",
+ }
+ )
page = Page(**self.page_kwargs)
self.assertEqual(page.save_as, "pages/foo-bar-fr.html")
@@ -206,34 +219,32 @@ def test_relative_source_path(self):
# If 'source_path' is None, 'relative_source_path' should
# also return None
- page_kwargs['source_path'] = None
+ page_kwargs["source_path"] = None
page = Page(**page_kwargs)
self.assertIsNone(page.relative_source_path)
page_kwargs = self._copy_page_kwargs()
settings = get_settings()
- full_path = page_kwargs['source_path']
+ full_path = page_kwargs["source_path"]
- settings['PATH'] = os.path.dirname(full_path)
- page_kwargs['settings'] = settings
+ settings["PATH"] = os.path.dirname(full_path)
+ page_kwargs["settings"] = settings
page = Page(**page_kwargs)
# if 'source_path' is set, 'relative_source_path' should
# return the relative path from 'PATH' to 'source_path'
self.assertEqual(
page.relative_source_path,
- os.path.relpath(
- full_path,
- os.path.dirname(full_path)
- ))
+ os.path.relpath(full_path, os.path.dirname(full_path)),
+ )
def test_metadata_url_format(self):
# Arbitrary metadata should be passed through url_format()
page = Page(**self.page_kwargs)
- self.assertIn('summary', page.url_format.keys())
- page.metadata['directory'] = 'test-dir'
- page.settings = get_settings(PAGE_SAVE_AS='{directory}/{slug}')
- self.assertEqual(page.save_as, 'test-dir/foo-bar')
+ self.assertIn("summary", page.url_format.keys())
+ page.metadata["directory"] = "test-dir"
+ page.settings = get_settings(PAGE_SAVE_AS="{directory}/{slug}")
+ self.assertEqual(page.save_as, "test-dir/foo-bar")
def test_datetime(self):
# If DATETIME is set to a tuple, it should be used to override LOCALE
@@ -242,28 +253,28 @@ def test_datetime(self):
page_kwargs = self._copy_page_kwargs()
# set its date to dt
- page_kwargs['metadata']['date'] = dt
+ page_kwargs["metadata"]["date"] = dt
page = Page(**page_kwargs)
# page.locale_date is a unicode string in both python2 and python3
- dt_date = dt.strftime(DEFAULT_CONFIG['DEFAULT_DATE_FORMAT'])
+ dt_date = dt.strftime(DEFAULT_CONFIG["DEFAULT_DATE_FORMAT"])
self.assertEqual(page.locale_date, dt_date)
- page_kwargs['settings'] = get_settings()
+ page_kwargs["settings"] = get_settings()
# I doubt this can work on all platforms ...
if platform == "win32":
- locale = 'jpn'
+ locale = "jpn"
else:
- locale = 'ja_JP.utf8'
- page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale,
- '%Y-%m-%d(%a)')}
- page_kwargs['metadata']['lang'] = 'jp'
+ locale = "ja_JP.utf8"
+ page_kwargs["settings"]["DATE_FORMATS"] = {"jp": (locale, "%Y-%m-%d(%a)")}
+ page_kwargs["metadata"]["lang"] = "jp"
import locale as locale_module
+
try:
page = Page(**page_kwargs)
- self.assertEqual(page.locale_date, '2015-09-13(\u65e5)')
+ self.assertEqual(page.locale_date, "2015-09-13(\u65e5)")
except locale_module.Error:
# The constructor of ``Page`` will try to set the locale to
# ``ja_JP.utf8``. But this attempt will failed when there is no
@@ -277,22 +288,21 @@ def test_datetime(self):
def test_template(self):
# Pages default to page, metadata overwrites
default_page = Page(**self.page_kwargs)
- self.assertEqual('page', default_page.template)
+ self.assertEqual("page", default_page.template)
page_kwargs = self._copy_page_kwargs()
- page_kwargs['metadata']['template'] = 'custom'
+ page_kwargs["metadata"]["template"] = "custom"
custom_page = Page(**page_kwargs)
- self.assertEqual('custom', custom_page.template)
+ self.assertEqual("custom", custom_page.template)
def test_signal(self):
def receiver_test_function(sender):
receiver_test_function.has_been_called = True
pass
+
receiver_test_function.has_been_called = False
content_object_init.connect(receiver_test_function)
- self.assertIn(
- receiver_test_function,
- content_object_init.receivers_for(Page))
+ self.assertIn(receiver_test_function, content_object_init.receivers_for(Page))
self.assertFalse(receiver_test_function.has_been_called)
Page(**self.page_kwargs)
@@ -303,102 +313,106 @@ def test_get_content(self):
# filenames, tags and categories.
settings = get_settings()
args = self.page_kwargs.copy()
- args['settings'] = settings
+ args["settings"] = settings
# Tag
- args['content'] = ('A simple test, with a '
- '<a href="|tag|tagname">link</a>')
+ args["content"] = "A simple test, with a " '<a href="|tag|tagname">link</a>'
page = Page(**args)
- content = page.get_content('http://notmyidea.org')
+ content = page.get_content("http://notmyidea.org")
self.assertEqual(
content,
- ('A simple test, with a '
- '<a href="http://notmyidea.org/tag/tagname.html">link</a>'))
+ (
+ "A simple test, with a "
+ '<a href="http://notmyidea.org/tag/tagname.html">link</a>'
+ ),
+ )
# Category
- args['content'] = ('A simple test, with a '
- '<a href="|category|category">link</a>')
+ args["content"] = (
+ "A simple test, with a " '<a href="|category|category">link</a>'
+ )
page = Page(**args)
- content = page.get_content('http://notmyidea.org')
+ content = page.get_content("http://notmyidea.org")
self.assertEqual(
content,
- ('A simple test, with a '
- '<a href="http://notmyidea.org/category/category.html">link</a>'))
+ (
+ "A simple test, with a "
+ '<a href="http://notmyidea.org/category/category.html">link</a>'
+ ),
+ )
def test_intrasite_link(self):
- cls_name = '_DummyArticle'
- article = type(cls_name, (object,), {'url': 'article.html'})
+ cls_name = "_DummyArticle"
+ article = type(cls_name, (object,), {"url": "article.html"})
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['generated_content'] = {'article.rst': article}
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["generated_content"] = {"article.rst": article}
# Classic intrasite link via filename
- args['content'] = (
- 'A simple test, with a '
- '<a href="|filename|article.rst">link</a>'
+ args["content"] = (
+ "A simple test, with a " '<a href="|filename|article.rst">link</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a '
- '<a href="http://notmyidea.org/article.html">link</a>'
+ "A simple test, with a "
+ '<a href="http://notmyidea.org/article.html">link</a>',
)
# fragment
- args['content'] = (
- 'A simple test, with a '
+ args["content"] = (
+ "A simple test, with a "
'<a href="|filename|article.rst#section-2">link</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a '
- '<a href="http://notmyidea.org/article.html#section-2">link</a>'
+ "A simple test, with a "
+ '<a href="http://notmyidea.org/article.html#section-2">link</a>',
)
# query
- args['content'] = (
- 'A simple test, with a '
+ args["content"] = (
+ "A simple test, with a "
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word">link</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a '
+ "A simple test, with a "
'<a href="http://notmyidea.org/article.html'
- '?utm_whatever=234&highlight=word">link</a>'
+ '?utm_whatever=234&highlight=word">link</a>',
)
# combination
- args['content'] = (
- 'A simple test, with a '
+ args["content"] = (
+ "A simple test, with a "
'<a href="|filename|article.rst'
'?utm_whatever=234&highlight=word#section-2">link</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a '
+ "A simple test, with a "
'<a href="http://notmyidea.org/article.html'
- '?utm_whatever=234&highlight=word#section-2">link</a>'
+ '?utm_whatever=234&highlight=word#section-2">link</a>',
)
# also test for summary in metadata
parsed = (
- 'A simple summary test, with a '
- '<a href="|filename|article.rst">link</a>'
+ "A simple summary test, with a " '<a href="|filename|article.rst">link</a>'
)
linked = (
- 'A simple summary test, with a '
+ "A simple summary test, with a "
'<a href="http://notmyidea.org/article.html">link</a>'
)
- args['settings']['FORMATTED_FIELDS'] = ['summary', 'custom']
- args['metadata']['summary'] = parsed
- args['metadata']['custom'] = parsed
- args['context']['localsiteurl'] = 'http://notmyidea.org'
+ args["settings"]["FORMATTED_FIELDS"] = ["summary", "custom"]
+ args["metadata"]["summary"] = parsed
+ args["metadata"]["custom"] = parsed
+ args["context"]["localsiteurl"] = "http://notmyidea.org"
p = Page(**args)
# This is called implicitly from all generators and Pelican.run() once
# all files are processed. Here we process just one page so it needs
@@ -408,252 +422,236 @@ def test_intrasite_link(self):
self.assertEqual(p.custom, linked)
def test_intrasite_link_more(self):
- cls_name = '_DummyAsset'
+ cls_name = "_DummyAsset"
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['static_content'] = {
- 'images/poster.jpg':
- type(cls_name, (object,), {'url': 'images/poster.jpg'}),
- 'assets/video.mp4':
- type(cls_name, (object,), {'url': 'assets/video.mp4'}),
- 'images/graph.svg':
- type(cls_name, (object,), {'url': 'images/graph.svg'}),
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["static_content"] = {
+ "images/poster.jpg": type(
+ cls_name, (object,), {"url": "images/poster.jpg"}
+ ),
+ "assets/video.mp4": type(cls_name, (object,), {"url": "assets/video.mp4"}),
+ "images/graph.svg": type(cls_name, (object,), {"url": "images/graph.svg"}),
}
- args['context']['generated_content'] = {
- 'reference.rst':
- type(cls_name, (object,), {'url': 'reference.html'}),
+ args["context"]["generated_content"] = {
+ "reference.rst": type(cls_name, (object,), {"url": "reference.html"}),
}
# video.poster
- args['content'] = (
- 'There is a video with poster '
+ args["content"] = (
+ "There is a video with poster "
'<video controls poster="{static}/images/poster.jpg">'
'<source src="|static|/assets/video.mp4" type="video/mp4">'
- '</video>'
+ "</video>"
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'There is a video with poster '
+ "There is a video with poster "
'<video controls poster="http://notmyidea.org/images/poster.jpg">'
'<source src="http://notmyidea.org/assets/video.mp4"'
' type="video/mp4">'
- '</video>'
+ "</video>",
)
# object.data
- args['content'] = (
- 'There is a svg object '
+ args["content"] = (
+ "There is a svg object "
'<object data="{static}/images/graph.svg"'
' type="image/svg+xml">'
- '</object>'
+ "</object>"
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'There is a svg object '
+ "There is a svg object "
'<object data="http://notmyidea.org/images/graph.svg"'
' type="image/svg+xml">'
- '</object>'
+ "</object>",
)
# blockquote.cite
- args['content'] = (
- 'There is a blockquote with cite attribute '
+ args["content"] = (
+ "There is a blockquote with cite attribute "
'<blockquote cite="{filename}reference.rst">blah blah</blockquote>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'There is a blockquote with cite attribute '
+ "There is a blockquote with cite attribute "
'<blockquote cite="http://notmyidea.org/reference.html">'
- 'blah blah'
- '</blockquote>'
+ "blah blah"
+ "</blockquote>",
)
def test_intrasite_link_absolute(self):
"""Test that absolute URLs are merged properly."""
args = self.page_kwargs.copy()
- args['settings'] = get_settings(
- STATIC_URL='http://static.cool.site/{path}',
- ARTICLE_URL='http://blog.cool.site/{slug}.html')
- args['source_path'] = 'content'
- args['context']['static_content'] = {
- 'images/poster.jpg':
- Static('', settings=args['settings'],
- source_path='images/poster.jpg'),
+ args["settings"] = get_settings(
+ STATIC_URL="http://static.cool.site/{path}",
+ ARTICLE_URL="http://blog.cool.site/{slug}.html",
+ )
+ args["source_path"] = "content"
+ args["context"]["static_content"] = {
+ "images/poster.jpg": Static(
+ "", settings=args["settings"], source_path="images/poster.jpg"
+ ),
}
- args['context']['generated_content'] = {
- 'article.rst':
- Article('', settings=args['settings'], metadata={
- 'slug': 'article', 'title': 'Article'})
+ args["context"]["generated_content"] = {
+ "article.rst": Article(
+ "",
+ settings=args["settings"],
+ metadata={"slug": "article", "title": "Article"},
+ )
}
# Article link will go to blog
- args['content'] = (
- '<a href="{filename}article.rst">Article</a>'
- )
- content = Page(**args).get_content('http://cool.site')
+ args["content"] = '<a href="{filename}article.rst">Article</a>'
+ content = Page(**args).get_content("http://cool.site")
self.assertEqual(
- content,
- '<a href="http://blog.cool.site/article.html">Article</a>'
+ content, '<a href="http://blog.cool.site/article.html">Article</a>'
)
# Page link will go to the main site
- args['content'] = (
- '<a href="{index}">Index</a>'
- )
- content = Page(**args).get_content('http://cool.site')
- self.assertEqual(
- content,
- '<a href="http://cool.site/index.html">Index</a>'
- )
+ args["content"] = '<a href="{index}">Index</a>'
+ content = Page(**args).get_content("http://cool.site")
+ self.assertEqual(content, '<a href="http://cool.site/index.html">Index</a>')
# Image link will go to static
- args['content'] = (
- '<img src="{static}/images/poster.jpg"/>'
- )
- content = Page(**args).get_content('http://cool.site')
+ args["content"] = '<img src="{static}/images/poster.jpg"/>'
+ content = Page(**args).get_content("http://cool.site")
self.assertEqual(
- content,
- '<img src="http://static.cool.site/images/poster.jpg"/>'
+ content, '<img src="http://static.cool.site/images/poster.jpg"/>'
)
# Image link will go to static
- args['content'] = (
- '<meta content="{static}/images/poster.jpg"/>'
- )
- content = Page(**args).get_content('http://cool.site')
+ args["content"] = '<meta content="{static}/images/poster.jpg"/>'
+ content = Page(**args).get_content("http://cool.site")
self.assertEqual(
- content,
- '<meta content="http://static.cool.site/images/poster.jpg"/>'
+ content, '<meta content="http://static.cool.site/images/poster.jpg"/>'
)
def test_intrasite_link_escape(self):
- article = type(
- '_DummyArticle', (object,), {'url': 'article-spaces.html'})
- asset = type(
- '_DummyAsset', (object,), {'url': 'name@example.com'})
+ article = type("_DummyArticle", (object,), {"url": "article-spaces.html"})
+ asset = type("_DummyAsset", (object,), {"url": "name@example.com"})
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['generated_content'] = {'article spaces.rst': article}
- args['context']['static_content'] = {'name@example.com': asset}
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["generated_content"] = {"article spaces.rst": article}
+ args["context"]["static_content"] = {"name@example.com": asset}
expected_output = (
- 'A simple test with a '
+ "A simple test with a "
'<a href="http://notmyidea.org/article-spaces.html#anchor">link</a> '
'<a href="http://notmyidea.org/name@example.com#anchor">file</a>'
)
# not escaped
- args['content'] = (
- 'A simple test with a '
+ args["content"] = (
+ "A simple test with a "
'<a href="{filename}article spaces.rst#anchor">link</a> '
'<a href="{static}name@example.com#anchor">file</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(content, expected_output)
# html escaped
- args['content'] = (
- 'A simple test with a '
+ args["content"] = (
+ "A simple test with a "
'<a href="{filename}article spaces.rst#anchor">link</a> '
'<a href="{static}name@example.com#anchor">file</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(content, expected_output)
# url escaped
- args['content'] = (
- 'A simple test with a '
+ args["content"] = (
+ "A simple test with a "
'<a href="{filename}article%20spaces.rst#anchor">link</a> '
'<a href="{static}name%40example.com#anchor">file</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(content, expected_output)
# html and url escaped
- args['content'] = (
- 'A simple test with a '
+ args["content"] = (
+ "A simple test with a "
'<a href="{filename}article%20spaces.rst#anchor">link</a> '
'<a href="{static}name@example.com#anchor">file</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(content, expected_output)
def test_intrasite_link_markdown_spaces(self):
- cls_name = '_DummyArticle'
- article = type(cls_name, (object,), {'url': 'article-spaces.html'})
+ cls_name = "_DummyArticle"
+ article = type(cls_name, (object,), {"url": "article-spaces.html"})
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['generated_content'] = {'article spaces.rst': article}
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["generated_content"] = {"article spaces.rst": article}
# An intrasite link via filename with %20 as a space
- args['content'] = (
- 'A simple test, with a '
- '<a href="|filename|article%20spaces.rst">link</a>'
+ args["content"] = (
+ "A simple test, with a " '<a href="|filename|article%20spaces.rst">link</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a '
- '<a href="http://notmyidea.org/article-spaces.html">link</a>'
+ "A simple test, with a "
+ '<a href="http://notmyidea.org/article-spaces.html">link</a>',
)
def test_intrasite_link_source_and_generated(self):
- """Test linking both to the source and the generated article
- """
- cls_name = '_DummyAsset'
+ """Test linking both to the source and the generated article"""
+ cls_name = "_DummyAsset"
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['generated_content'] = {
- 'article.rst': type(cls_name, (object,), {'url': 'article.html'})}
- args['context']['static_content'] = {
- 'article.rst': type(cls_name, (object,), {'url': 'article.rst'})}
-
- args['content'] = (
- 'A simple test, with a link to an'
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["generated_content"] = {
+ "article.rst": type(cls_name, (object,), {"url": "article.html"})
+ }
+ args["context"]["static_content"] = {
+ "article.rst": type(cls_name, (object,), {"url": "article.rst"})
+ }
+
+ args["content"] = (
+ "A simple test, with a link to an"
'<a href="{filename}article.rst">article</a> and its'
'<a href="{static}article.rst">source</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a link to an'
+ "A simple test, with a link to an"
'<a href="http://notmyidea.org/article.html">article</a> and its'
- '<a href="http://notmyidea.org/article.rst">source</a>'
+ '<a href="http://notmyidea.org/article.rst">source</a>',
)
def test_intrasite_link_to_static_content_with_filename(self):
- """Test linking to a static resource with deprecated {filename}
- """
- cls_name = '_DummyAsset'
+ """Test linking to a static resource with deprecated {filename}"""
+ cls_name = "_DummyAsset"
args = self.page_kwargs.copy()
- args['settings'] = get_settings()
- args['source_path'] = 'content'
- args['context']['static_content'] = {
- 'poster.jpg':
- type(cls_name, (object,), {'url': 'images/poster.jpg'})}
-
- args['content'] = (
- 'A simple test, with a link to a'
+ args["settings"] = get_settings()
+ args["source_path"] = "content"
+ args["context"]["static_content"] = {
+ "poster.jpg": type(cls_name, (object,), {"url": "images/poster.jpg"})
+ }
+
+ args["content"] = (
+ "A simple test, with a link to a"
'<a href="{filename}poster.jpg">poster</a>'
)
- content = Page(**args).get_content('http://notmyidea.org')
+ content = Page(**args).get_content("http://notmyidea.org")
self.assertEqual(
content,
- 'A simple test, with a link to a'
- '<a href="http://notmyidea.org/images/poster.jpg">poster</a>'
+ "A simple test, with a link to a"
+ '<a href="http://notmyidea.org/images/poster.jpg">poster</a>',
)
def test_multiple_authors(self):
@@ -661,9 +659,11 @@ def test_multiple_authors(self):
args = self.page_kwargs.copy()
content = Page(**args)
assert content.authors == [content.author]
- args['metadata'].pop('author')
- args['metadata']['authors'] = [Author('First Author', DEFAULT_CONFIG),
- Author('Second Author', DEFAULT_CONFIG)]
+ args["metadata"].pop("author")
+ args["metadata"]["authors"] = [
+ Author("First Author", DEFAULT_CONFIG),
+ Author("Second Author", DEFAULT_CONFIG),
+ ]
content = Page(**args)
assert content.authors
assert content.author == content.authors[0]
@@ -673,173 +673,184 @@ class TestArticle(TestBase):
def test_template(self):
# Articles default to article, metadata overwrites
default_article = Article(**self.page_kwargs)
- self.assertEqual('article', default_article.template)
+ self.assertEqual("article", default_article.template)
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['template'] = 'custom'
+ article_kwargs["metadata"]["template"] = "custom"
custom_article = Article(**article_kwargs)
- self.assertEqual('custom', custom_article.template)
+ self.assertEqual("custom", custom_article.template)
def test_slugify_category_author(self):
settings = get_settings()
- settings['SLUG_REGEX_SUBSTITUTIONS'] = [
- (r'C#', 'csharp'),
- (r'[^\w\s-]', ''),
- (r'(?u)\A\s*', ''),
- (r'(?u)\s*\Z', ''),
- (r'[-\s]+', '-'),
+ settings["SLUG_REGEX_SUBSTITUTIONS"] = [
+ (r"C#", "csharp"),
+ (r"[^\w\s-]", ""),
+ (r"(?u)\A\s*", ""),
+ (r"(?u)\s*\Z", ""),
+ (r"[-\s]+", "-"),
]
- settings['ARTICLE_URL'] = '{author}/{category}/{slug}/'
- settings['ARTICLE_SAVE_AS'] = '{author}/{category}/{slug}/index.html'
+ settings["ARTICLE_URL"] = "{author}/{category}/{slug}/"
+ settings["ARTICLE_SAVE_AS"] = "{author}/{category}/{slug}/index.html"
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['author'] = Author("O'Brien", settings)
- article_kwargs['metadata']['category'] = Category(
- 'C# & stuff', settings)
- article_kwargs['metadata']['title'] = 'fnord'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["author"] = Author("O'Brien", settings)
+ article_kwargs["metadata"]["category"] = Category("C# & stuff", settings)
+ article_kwargs["metadata"]["title"] = "fnord"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
- self.assertEqual(article.url, 'obrien/csharp-stuff/fnord/')
- self.assertEqual(
- article.save_as, 'obrien/csharp-stuff/fnord/index.html')
+ self.assertEqual(article.url, "obrien/csharp-stuff/fnord/")
+ self.assertEqual(article.save_as, "obrien/csharp-stuff/fnord/index.html")
def test_slugify_with_author_substitutions(self):
settings = get_settings()
- settings['AUTHOR_REGEX_SUBSTITUTIONS'] = [
- ('Alexander Todorov', 'atodorov'),
- ('Krasimir Tsonev', 'krasimir'),
- (r'[^\w\s-]', ''),
- (r'(?u)\A\s*', ''),
- (r'(?u)\s*\Z', ''),
- (r'[-\s]+', '-'),
+ settings["AUTHOR_REGEX_SUBSTITUTIONS"] = [
+ ("Alexander Todorov", "atodorov"),
+ ("Krasimir Tsonev", "krasimir"),
+ (r"[^\w\s-]", ""),
+ (r"(?u)\A\s*", ""),
+ (r"(?u)\s*\Z", ""),
+ (r"[-\s]+", "-"),
]
- settings['ARTICLE_URL'] = 'blog/{author}/{slug}/'
- settings['ARTICLE_SAVE_AS'] = 'blog/{author}/{slug}/index.html'
+ settings["ARTICLE_URL"] = "blog/{author}/{slug}/"
+ settings["ARTICLE_SAVE_AS"] = "blog/{author}/{slug}/index.html"
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['author'] = Author('Alexander Todorov',
- settings)
- article_kwargs['metadata']['title'] = 'fnord'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["author"] = Author("Alexander Todorov", settings)
+ article_kwargs["metadata"]["title"] = "fnord"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
- self.assertEqual(article.url, 'blog/atodorov/fnord/')
- self.assertEqual(article.save_as, 'blog/atodorov/fnord/index.html')
+ self.assertEqual(article.url, "blog/atodorov/fnord/")
+ self.assertEqual(article.save_as, "blog/atodorov/fnord/index.html")
def test_slugify_category_with_dots(self):
settings = get_settings()
- settings['CATEGORY_REGEX_SUBSTITUTIONS'] = [
- ('Fedora QA', 'fedora.qa'),
+ settings["CATEGORY_REGEX_SUBSTITUTIONS"] = [
+ ("Fedora QA", "fedora.qa"),
]
- settings['ARTICLE_URL'] = '{category}/{slug}/'
+ settings["ARTICLE_URL"] = "{category}/{slug}/"
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['category'] = Category('Fedora QA',
- settings)
- article_kwargs['metadata']['title'] = 'This Week in Fedora QA'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["category"] = Category("Fedora QA", settings)
+ article_kwargs["metadata"]["title"] = "This Week in Fedora QA"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
- self.assertEqual(article.url, 'fedora.qa/this-week-in-fedora-qa/')
+ self.assertEqual(article.url, "fedora.qa/this-week-in-fedora-qa/")
def test_valid_save_as_detects_breakout(self):
settings = get_settings()
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['slug'] = '../foo'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["slug"] = "../foo"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
self.assertFalse(article._has_valid_save_as())
def test_valid_save_as_detects_breakout_to_root(self):
settings = get_settings()
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['slug'] = '/foo'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["slug"] = "/foo"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
self.assertFalse(article._has_valid_save_as())
def test_valid_save_as_passes_valid(self):
settings = get_settings()
article_kwargs = self._copy_page_kwargs()
- article_kwargs['metadata']['slug'] = 'foo'
- article_kwargs['settings'] = settings
+ article_kwargs["metadata"]["slug"] = "foo"
+ article_kwargs["settings"] = settings
article = Article(**article_kwargs)
self.assertTrue(article._has_valid_save_as())
class TestStatic(LoggedTestCase):
-
def setUp(self):
super().setUp()
self.settings = get_settings(
- STATIC_SAVE_AS='{path}',
- STATIC_URL='{path}',
- PAGE_SAVE_AS=os.path.join('outpages', '{slug}.html'),
- PAGE_URL='outpages/{slug}.html')
+ STATIC_SAVE_AS="{path}",
+ STATIC_URL="{path}",
+ PAGE_SAVE_AS=os.path.join("outpages", "{slug}.html"),
+ PAGE_URL="outpages/{slug}.html",
+ )
self.context = get_context(self.settings)
- self.static = Static(content=None, metadata={}, settings=self.settings,
- source_path=posix_join('dir', 'foo.jpg'),
- context=self.context)
+ self.static = Static(
+ content=None,
+ metadata={},
+ settings=self.settings,
+ source_path=posix_join("dir", "foo.jpg"),
+ context=self.context,
+ )
- self.context['static_content'][self.static.source_path] = self.static
+ self.context["static_content"][self.static.source_path] = self.static
def tearDown(self):
pass
def test_attach_to_same_dir(self):
- """attach_to() overrides a static file's save_as and url.
- """
+ """attach_to() overrides a static file's save_as and url."""
page = Page(
content="fake page",
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'fakepage.md'))
+ source_path=os.path.join("dir", "fakepage.md"),
+ )
self.static.attach_to(page)
- expected_save_as = os.path.join('outpages', 'foo.jpg')
+ expected_save_as = os.path.join("outpages", "foo.jpg")
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_parent_dir(self):
- """attach_to() preserves dirs inside the linking document dir.
- """
- page = Page(content="fake page", metadata={'title': 'fakepage'},
- settings=self.settings, source_path='fakepage.md')
+ """attach_to() preserves dirs inside the linking document dir."""
+ page = Page(
+ content="fake page",
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path="fakepage.md",
+ )
self.static.attach_to(page)
- expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg')
+ expected_save_as = os.path.join("outpages", "dir", "foo.jpg")
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_other_dir(self):
- """attach_to() ignores dirs outside the linking document dir.
- """
- page = Page(content="fake page",
- metadata={'title': 'fakepage'}, settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'))
+ """attach_to() ignores dirs outside the linking document dir."""
+ page = Page(
+ content="fake page",
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ )
self.static.attach_to(page)
- expected_save_as = os.path.join('outpages', 'foo.jpg')
+ expected_save_as = os.path.join("outpages", "foo.jpg")
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
def test_attach_to_ignores_subsequent_calls(self):
- """attach_to() does nothing when called a second time.
- """
- page = Page(content="fake page",
- metadata={'title': 'fakepage'}, settings=self.settings,
- source_path=os.path.join('dir', 'fakepage.md'))
+ """attach_to() does nothing when called a second time."""
+ page = Page(
+ content="fake page",
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path=os.path.join("dir", "fakepage.md"),
+ )
self.static.attach_to(page)
otherdir_settings = self.settings.copy()
- otherdir_settings.update(dict(
- PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'),
- PAGE_URL='otherpages/{slug}.html'))
+ otherdir_settings.update(
+ dict(
+ PAGE_SAVE_AS=os.path.join("otherpages", "{slug}.html"),
+ PAGE_URL="otherpages/{slug}.html",
+ )
+ )
otherdir_page = Page(
content="other page",
- metadata={'title': 'otherpage'},
+ metadata={"title": "otherpage"},
settings=otherdir_settings,
- source_path=os.path.join('dir', 'otherpage.md'))
+ source_path=os.path.join("dir", "otherpage.md"),
+ )
self.static.attach_to(otherdir_page)
- otherdir_save_as = os.path.join('otherpages', 'foo.jpg')
+ otherdir_save_as = os.path.join("otherpages", "foo.jpg")
self.assertNotEqual(self.static.save_as, otherdir_save_as)
self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as))
@@ -851,9 +862,10 @@ def test_attach_to_does_nothing_after_save_as_referenced(self):
page = Page(
content="fake page",
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'fakepage.md'))
+ source_path=os.path.join("dir", "fakepage.md"),
+ )
self.static.attach_to(page)
self.assertEqual(self.static.save_as, original_save_as)
@@ -867,9 +879,10 @@ def test_attach_to_does_nothing_after_url_referenced(self):
page = Page(
content="fake page",
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'fakepage.md'))
+ source_path=os.path.join("dir", "fakepage.md"),
+ )
self.static.attach_to(page)
self.assertEqual(self.static.save_as, self.static.source_path)
@@ -881,38 +894,41 @@ def test_attach_to_does_not_override_an_override(self):
"""
customstatic = Static(
content=None,
- metadata=dict(save_as='customfoo.jpg', url='customfoo.jpg'),
+ metadata=dict(save_as="customfoo.jpg", url="customfoo.jpg"),
settings=self.settings,
- source_path=os.path.join('dir', 'foo.jpg'),
- context=self.settings.copy())
+ source_path=os.path.join("dir", "foo.jpg"),
+ context=self.settings.copy(),
+ )
page = Page(
content="fake page",
- metadata={'title': 'fakepage'}, settings=self.settings,
- source_path=os.path.join('dir', 'fakepage.md'))
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path=os.path.join("dir", "fakepage.md"),
+ )
customstatic.attach_to(page)
- self.assertEqual(customstatic.save_as, 'customfoo.jpg')
- self.assertEqual(customstatic.url, 'customfoo.jpg')
+ self.assertEqual(customstatic.save_as, "customfoo.jpg")
+ self.assertEqual(customstatic.url, "customfoo.jpg")
def test_attach_link_syntax(self):
- """{attach} link syntax triggers output path override & url replacement.
- """
+ """{attach} link syntax triggers output path override & url replacement."""
html = '<a href="{attach}../foo.jpg">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(
- content, html,
- "{attach} link syntax did not trigger URL replacement.")
+ content, html, "{attach} link syntax did not trigger URL replacement."
+ )
- expected_save_as = os.path.join('outpages', 'foo.jpg')
+ expected_save_as = os.path.join("outpages", "foo.jpg")
self.assertEqual(self.static.save_as, expected_save_as)
self.assertEqual(self.static.url, path_to_url(expected_save_as))
@@ -922,11 +938,12 @@ def test_tag_link_syntax(self):
html = '<a href="{tag}foo">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(content, html)
@@ -936,11 +953,12 @@ def test_category_link_syntax(self):
html = '<a href="{category}foo">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(content, html)
@@ -950,11 +968,12 @@ def test_author_link_syntax(self):
html = '<a href="{author}foo">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(content, html)
@@ -964,52 +983,62 @@ def test_index_link_syntax(self):
html = '<a href="{index}">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(content, html)
- expected_html = ('<a href="' +
- '/'.join((self.settings['SITEURL'],
- self.settings['INDEX_SAVE_AS'])) +
- '">link</a>')
+ expected_html = (
+ '<a href="'
+ + "/".join((self.settings["SITEURL"], self.settings["INDEX_SAVE_AS"]))
+ + '">link</a>'
+ )
self.assertEqual(content, expected_html)
def test_unknown_link_syntax(self):
"{unknown} link syntax should trigger warning."
html = '<a href="{unknown}foo">link</a>'
- page = Page(content=html,
- metadata={'title': 'fakepage'}, settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ page = Page(
+ content=html,
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertEqual(content, html)
self.assertLogCountEqual(
count=1,
msg="Replacement Indicator 'unknown' not recognized, "
- "skipping replacement",
- level=logging.WARNING)
+ "skipping replacement",
+ level=logging.WARNING,
+ )
def test_link_to_unknown_file(self):
"{filename} link to unknown file should trigger warning."
html = '<a href="{filename}foo">link</a>'
- page = Page(content=html,
- metadata={'title': 'fakepage'}, settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ page = Page(
+ content=html,
+ metadata={"title": "fakepage"},
+ settings=self.settings,
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertEqual(content, html)
self.assertLogCountEqual(
count=1,
msg="Unable to find 'foo', skipping url replacement.",
- level=logging.WARNING)
+ level=logging.WARNING,
+ )
def test_index_link_syntax_with_spaces(self):
"""{index} link syntax triggers url replacement
@@ -1018,18 +1047,20 @@ def test_index_link_syntax_with_spaces(self):
html = '<a href = "{index}">link</a>'
page = Page(
content=html,
- metadata={'title': 'fakepage'},
+ metadata={"title": "fakepage"},
settings=self.settings,
- source_path=os.path.join('dir', 'otherdir', 'fakepage.md'),
- context=self.context)
- content = page.get_content('')
+ source_path=os.path.join("dir", "otherdir", "fakepage.md"),
+ context=self.context,
+ )
+ content = page.get_content("")
self.assertNotEqual(content, html)
- expected_html = ('<a href = "' +
- '/'.join((self.settings['SITEURL'],
- self.settings['INDEX_SAVE_AS'])) +
- '">link</a>')
+ expected_html = (
+ '<a href = "'
+ + "/".join((self.settings["SITEURL"], self.settings["INDEX_SAVE_AS"]))
+ + '">link</a>'
+ )
self.assertEqual(content, expected_html)
def test_not_save_as_draft(self):
@@ -1037,12 +1068,15 @@ def test_not_save_as_draft(self):
static = Static(
content=None,
- metadata=dict(status='draft',),
+ metadata=dict(
+ status="draft",
+ ),
settings=self.settings,
- source_path=os.path.join('dir', 'foo.jpg'),
- context=self.settings.copy())
+ source_path=os.path.join("dir", "foo.jpg"),
+ context=self.settings.copy(),
+ )
- expected_save_as = posixize_path(os.path.join('dir', 'foo.jpg'))
- self.assertEqual(static.status, 'draft')
+ expected_save_as = posixize_path(os.path.join("dir", "foo.jpg"))
+ self.assertEqual(static.status, "draft")
self.assertEqual(static.save_as, expected_save_as)
self.assertEqual(static.url, path_to_url(expected_save_as))
diff --git a/pelican/tests/test_generators.py b/pelican/tests/test_generators.py
--- a/pelican/tests/test_generators.py
+++ b/pelican/tests/test_generators.py
@@ -4,293 +4,383 @@
from tempfile import mkdtemp
from unittest.mock import MagicMock
-from pelican.generators import (ArticlesGenerator, Generator, PagesGenerator,
- PelicanTemplateNotFound, StaticGenerator,
- TemplatePagesGenerator)
-from pelican.tests.support import (can_symlink, get_context, get_settings,
- unittest, TestCaseWithCLocale)
+from pelican.generators import (
+ ArticlesGenerator,
+ Generator,
+ PagesGenerator,
+ PelicanTemplateNotFound,
+ StaticGenerator,
+ TemplatePagesGenerator,
+)
+from pelican.tests.support import (
+ can_symlink,
+ get_context,
+ get_settings,
+ unittest,
+ TestCaseWithCLocale,
+)
from pelican.writers import Writer
CUR_DIR = os.path.dirname(__file__)
-CONTENT_DIR = os.path.join(CUR_DIR, 'content')
+CONTENT_DIR = os.path.join(CUR_DIR, "content")
class TestGenerator(TestCaseWithCLocale):
def setUp(self):
super().setUp()
self.settings = get_settings()
- self.settings['READERS'] = {'asc': None}
- self.generator = Generator(self.settings.copy(), self.settings,
- CUR_DIR, self.settings['THEME'], None)
+ self.settings["READERS"] = {"asc": None}
+ self.generator = Generator(
+ self.settings.copy(), self.settings, CUR_DIR, self.settings["THEME"], None
+ )
def test_include_path(self):
- self.settings['IGNORE_FILES'] = {'ignored1.rst', 'ignored2.rst'}
+ self.settings["IGNORE_FILES"] = {"ignored1.rst", "ignored2.rst"}
- filename = os.path.join(CUR_DIR, 'content', 'article.rst')
+ filename = os.path.join(CUR_DIR, "content", "article.rst")
include_path = self.generator._include_path
self.assertTrue(include_path(filename))
- self.assertTrue(include_path(filename, extensions=('rst',)))
- self.assertFalse(include_path(filename, extensions=('md',)))
+ self.assertTrue(include_path(filename, extensions=("rst",)))
+ self.assertFalse(include_path(filename, extensions=("md",)))
- ignored_file = os.path.join(CUR_DIR, 'content', 'ignored1.rst')
+ ignored_file = os.path.join(CUR_DIR, "content", "ignored1.rst")
self.assertFalse(include_path(ignored_file))
def test_get_files_exclude(self):
- """Test that Generator.get_files() properly excludes directories.
- """
+ """Test that Generator.get_files() properly excludes directories."""
# We use our own Generator so we can give it our own content path
generator = Generator(
context=self.settings.copy(),
settings=self.settings,
- path=os.path.join(CUR_DIR, 'nested_content'),
- theme=self.settings['THEME'], output_path=None)
+ path=os.path.join(CUR_DIR, "nested_content"),
+ theme=self.settings["THEME"],
+ output_path=None,
+ )
- filepaths = generator.get_files(paths=['maindir'])
+ filepaths = generator.get_files(paths=["maindir"])
found_files = {os.path.basename(f) for f in filepaths}
- expected_files = {'maindir.md', 'subdir.md'}
+ expected_files = {"maindir.md", "subdir.md"}
self.assertFalse(
- expected_files - found_files,
- "get_files() failed to find one or more files")
+ expected_files - found_files, "get_files() failed to find one or more files"
+ )
# Test string as `paths` argument rather than list
- filepaths = generator.get_files(paths='maindir')
+ filepaths = generator.get_files(paths="maindir")
found_files = {os.path.basename(f) for f in filepaths}
- expected_files = {'maindir.md', 'subdir.md'}
+ expected_files = {"maindir.md", "subdir.md"}
self.assertFalse(
- expected_files - found_files,
- "get_files() failed to find one or more files")
+ expected_files - found_files, "get_files() failed to find one or more files"
+ )
- filepaths = generator.get_files(paths=[''], exclude=['maindir'])
+ filepaths = generator.get_files(paths=[""], exclude=["maindir"])
found_files = {os.path.basename(f) for f in filepaths}
self.assertNotIn(
- 'maindir.md', found_files,
- "get_files() failed to exclude a top-level directory")
+ "maindir.md",
+ found_files,
+ "get_files() failed to exclude a top-level directory",
+ )
self.assertNotIn(
- 'subdir.md', found_files,
- "get_files() failed to exclude a subdir of an excluded directory")
+ "subdir.md",
+ found_files,
+ "get_files() failed to exclude a subdir of an excluded directory",
+ )
filepaths = generator.get_files(
- paths=[''],
- exclude=[os.path.join('maindir', 'subdir')])
+ paths=[""], exclude=[os.path.join("maindir", "subdir")]
+ )
found_files = {os.path.basename(f) for f in filepaths}
self.assertNotIn(
- 'subdir.md', found_files,
- "get_files() failed to exclude a subdirectory")
+ "subdir.md", found_files, "get_files() failed to exclude a subdirectory"
+ )
- filepaths = generator.get_files(paths=[''], exclude=['subdir'])
+ filepaths = generator.get_files(paths=[""], exclude=["subdir"])
found_files = {os.path.basename(f) for f in filepaths}
self.assertIn(
- 'subdir.md', found_files,
- "get_files() excluded a subdirectory by name, ignoring its path")
+ "subdir.md",
+ found_files,
+ "get_files() excluded a subdirectory by name, ignoring its path",
+ )
def test_custom_jinja_environment(self):
"""
- Test that setting the JINJA_ENVIRONMENT
- properly gets set from the settings config
+ Test that setting the JINJA_ENVIRONMENT
+ properly gets set from the settings config
"""
settings = get_settings()
- comment_start_string = 'abc'
- comment_end_string = '/abc'
- settings['JINJA_ENVIRONMENT'] = {
- 'comment_start_string': comment_start_string,
- 'comment_end_string': comment_end_string
+ comment_start_string = "abc"
+ comment_end_string = "/abc"
+ settings["JINJA_ENVIRONMENT"] = {
+ "comment_start_string": comment_start_string,
+ "comment_end_string": comment_end_string,
}
- generator = Generator(settings.copy(), settings,
- CUR_DIR, settings['THEME'], None)
- self.assertEqual(comment_start_string,
- generator.env.comment_start_string)
- self.assertEqual(comment_end_string,
- generator.env.comment_end_string)
+ generator = Generator(
+ settings.copy(), settings, CUR_DIR, settings["THEME"], None
+ )
+ self.assertEqual(comment_start_string, generator.env.comment_start_string)
+ self.assertEqual(comment_end_string, generator.env.comment_end_string)
def test_theme_overrides(self):
"""
- Test that the THEME_TEMPLATES_OVERRIDES configuration setting is
- utilized correctly in the Generator.
+ Test that the THEME_TEMPLATES_OVERRIDES configuration setting is
+ utilized correctly in the Generator.
"""
- override_dirs = (os.path.join(CUR_DIR, 'theme_overrides', 'level1'),
- os.path.join(CUR_DIR, 'theme_overrides', 'level2'))
- self.settings['THEME_TEMPLATES_OVERRIDES'] = override_dirs
+ override_dirs = (
+ os.path.join(CUR_DIR, "theme_overrides", "level1"),
+ os.path.join(CUR_DIR, "theme_overrides", "level2"),
+ )
+ self.settings["THEME_TEMPLATES_OVERRIDES"] = override_dirs
generator = Generator(
context=self.settings.copy(),
settings=self.settings,
path=CUR_DIR,
- theme=self.settings['THEME'],
- output_path=None)
+ theme=self.settings["THEME"],
+ output_path=None,
+ )
- filename = generator.get_template('article').filename
+ filename = generator.get_template("article").filename
self.assertEqual(override_dirs[0], os.path.dirname(filename))
- self.assertEqual('article.html', os.path.basename(filename))
+ self.assertEqual("article.html", os.path.basename(filename))
- filename = generator.get_template('authors').filename
+ filename = generator.get_template("authors").filename
self.assertEqual(override_dirs[1], os.path.dirname(filename))
- self.assertEqual('authors.html', os.path.basename(filename))
+ self.assertEqual("authors.html", os.path.basename(filename))
- filename = generator.get_template('taglist').filename
- self.assertEqual(os.path.join(self.settings['THEME'], 'templates'),
- os.path.dirname(filename))
+ filename = generator.get_template("taglist").filename
+ self.assertEqual(
+ os.path.join(self.settings["THEME"], "templates"), os.path.dirname(filename)
+ )
self.assertNotIn(os.path.dirname(filename), override_dirs)
- self.assertEqual('taglist.html', os.path.basename(filename))
+ self.assertEqual("taglist.html", os.path.basename(filename))
def test_simple_prefix(self):
"""
- Test `!simple` theme prefix.
+ Test `!simple` theme prefix.
"""
- filename = self.generator.get_template('!simple/authors').filename
+ filename = self.generator.get_template("!simple/authors").filename
expected_path = os.path.join(
- os.path.dirname(CUR_DIR), 'themes', 'simple', 'templates')
+ os.path.dirname(CUR_DIR), "themes", "simple", "templates"
+ )
self.assertEqual(expected_path, os.path.dirname(filename))
- self.assertEqual('authors.html', os.path.basename(filename))
+ self.assertEqual("authors.html", os.path.basename(filename))
def test_theme_prefix(self):
"""
- Test `!theme` theme prefix.
+ Test `!theme` theme prefix.
"""
- filename = self.generator.get_template('!theme/authors').filename
+ filename = self.generator.get_template("!theme/authors").filename
expected_path = os.path.join(
- os.path.dirname(CUR_DIR), 'themes', 'notmyidea', 'templates')
+ os.path.dirname(CUR_DIR), "themes", "notmyidea", "templates"
+ )
self.assertEqual(expected_path, os.path.dirname(filename))
- self.assertEqual('authors.html', os.path.basename(filename))
+ self.assertEqual("authors.html", os.path.basename(filename))
def test_bad_prefix(self):
"""
- Test unknown/bad theme prefix throws exception.
+ Test unknown/bad theme prefix throws exception.
"""
- self.assertRaises(PelicanTemplateNotFound, self.generator.get_template,
- '!UNKNOWN/authors')
+ self.assertRaises(
+ PelicanTemplateNotFound, self.generator.get_template, "!UNKNOWN/authors"
+ )
class TestArticlesGenerator(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
settings = get_settings()
- settings['DEFAULT_CATEGORY'] = 'Default'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['READERS'] = {'asc': None}
- settings['CACHE_CONTENT'] = False
+ settings["DEFAULT_CATEGORY"] = "Default"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["READERS"] = {"asc": None}
+ settings["CACHE_CONTENT"] = False
context = get_context(settings)
cls.generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
cls.generator.generate_context()
cls.articles = cls.distill_articles(cls.generator.articles)
cls.drafts = cls.distill_articles(cls.generator.drafts)
cls.hidden_articles = cls.distill_articles(cls.generator.hidden_articles)
def setUp(self):
- self.temp_cache = mkdtemp(prefix='pelican_cache.')
+ self.temp_cache = mkdtemp(prefix="pelican_cache.")
def tearDown(self):
rmtree(self.temp_cache)
@staticmethod
def distill_articles(articles):
- return [[article.title, article.status, article.category.name,
- article.template] for article in articles]
+ return [
+ [article.title, article.status, article.category.name, article.template]
+ for article in articles
+ ]
def test_generate_feeds(self):
settings = get_settings()
- settings['CACHE_PATH'] = self.temp_cache
+ settings["CACHE_PATH"] = self.temp_cache
generator = ArticlesGenerator(
- context=settings, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=settings,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
writer = MagicMock()
generator.generate_feeds(writer)
- writer.write_feed.assert_called_with([], settings,
- 'feeds/all.atom.xml',
- 'feeds/all.atom.xml')
+ writer.write_feed.assert_called_with(
+ [], settings, "feeds/all.atom.xml", "feeds/all.atom.xml"
+ )
generator = ArticlesGenerator(
- context=settings, settings=get_settings(FEED_ALL_ATOM=None),
- path=None, theme=settings['THEME'], output_path=None)
+ context=settings,
+ settings=get_settings(FEED_ALL_ATOM=None),
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
writer = MagicMock()
generator.generate_feeds(writer)
self.assertFalse(writer.write_feed.called)
def test_generate_feeds_override_url(self):
settings = get_settings()
- settings['CACHE_PATH'] = self.temp_cache
- settings['FEED_ALL_ATOM_URL'] = 'feeds/atom/all/'
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["FEED_ALL_ATOM_URL"] = "feeds/atom/all/"
generator = ArticlesGenerator(
- context=settings, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=settings,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
writer = MagicMock()
generator.generate_feeds(writer)
- writer.write_feed.assert_called_with([], settings,
- 'feeds/all.atom.xml',
- 'feeds/atom/all/')
+ writer.write_feed.assert_called_with(
+ [], settings, "feeds/all.atom.xml", "feeds/atom/all/"
+ )
def test_generate_context(self):
articles_expected = [
- ['Article title', 'published', 'Default', 'article'],
- ['Article with markdown and summary metadata multi', 'published',
- 'Default', 'article'],
- ['Article with markdown and nested summary metadata', 'published',
- 'Default', 'article'],
- ['Article with markdown and summary metadata single', 'published',
- 'Default', 'article'],
- ['Article with markdown containing footnotes', 'published',
- 'Default', 'article'],
- ['Article with template', 'published', 'Default', 'custom'],
- ['Metadata tags as list!', 'published', 'Default', 'article'],
- ['Rst with filename metadata', 'published', 'yeah', 'article'],
- ['One -, two --, three --- dashes!', 'published', 'Default',
- 'article'],
- ['One -, two --, three --- dashes!', 'published', 'Default',
- 'article'],
- ['Test Markdown extensions', 'published', 'Default', 'article'],
- ['Test markdown File', 'published', 'test', 'article'],
- ['Test md File', 'published', 'test', 'article'],
- ['Test mdown File', 'published', 'test', 'article'],
- ['Test metadata duplicates', 'published', 'test', 'article'],
- ['Test mkd File', 'published', 'test', 'article'],
- ['This is a super article !', 'published', 'Yeah', 'article'],
- ['This is a super article !', 'published', 'Yeah', 'article'],
- ['Article with Nonconformant HTML meta tags', 'published',
- 'Default', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'yeah', 'article'],
- ['This is a super article !', 'published', 'Default', 'article'],
- ['Article with an inline SVG', 'published', 'Default', 'article'],
- ['Article with markdown and empty tags', 'published', 'Default',
- 'article'],
- ['This is an article with category !', 'published', 'yeah',
- 'article'],
- ['This is an article with multiple authors!', 'published',
- 'Default', 'article'],
- ['This is an article with multiple authors!', 'published',
- 'Default', 'article'],
- ['This is an article with multiple authors in list format!',
- 'published', 'Default', 'article'],
- ['This is an article with multiple authors in lastname, '
- 'firstname format!', 'published', 'Default', 'article'],
- ['This is an article without category !', 'published', 'Default',
- 'article'],
- ['This is an article without category !', 'published',
- 'TestCategory', 'article'],
- ['An Article With Code Block To Test Typogrify Ignore',
- 'published', 'Default', 'article'],
- ['マックOS X 10.8でパイソンとVirtualenvをインストールと設定',
- 'published', '指導書', 'article'],
+ ["Article title", "published", "Default", "article"],
+ [
+ "Article with markdown and summary metadata multi",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "Article with markdown and nested summary metadata",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "Article with markdown and summary metadata single",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "Article with markdown containing footnotes",
+ "published",
+ "Default",
+ "article",
+ ],
+ ["Article with template", "published", "Default", "custom"],
+ ["Metadata tags as list!", "published", "Default", "article"],
+ ["Rst with filename metadata", "published", "yeah", "article"],
+ ["One -, two --, three --- dashes!", "published", "Default", "article"],
+ ["One -, two --, three --- dashes!", "published", "Default", "article"],
+ ["Test Markdown extensions", "published", "Default", "article"],
+ ["Test markdown File", "published", "test", "article"],
+ ["Test md File", "published", "test", "article"],
+ ["Test mdown File", "published", "test", "article"],
+ ["Test metadata duplicates", "published", "test", "article"],
+ ["Test mkd File", "published", "test", "article"],
+ ["This is a super article !", "published", "Yeah", "article"],
+ ["This is a super article !", "published", "Yeah", "article"],
+ [
+ "Article with Nonconformant HTML meta tags",
+ "published",
+ "Default",
+ "article",
+ ],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "yeah", "article"],
+ ["This is a super article !", "published", "Default", "article"],
+ ["Article with an inline SVG", "published", "Default", "article"],
+ ["Article with markdown and empty tags", "published", "Default", "article"],
+ ["This is an article with category !", "published", "yeah", "article"],
+ [
+ "This is an article with multiple authors!",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "This is an article with multiple authors!",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "This is an article with multiple authors in list format!",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "This is an article with multiple authors in lastname, "
+ "firstname format!",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "This is an article without category !",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "This is an article without category !",
+ "published",
+ "TestCategory",
+ "article",
+ ],
+ [
+ "An Article With Code Block To Test Typogrify Ignore",
+ "published",
+ "Default",
+ "article",
+ ],
+ [
+ "マックOS X 10.8でパイソンとVirtualenvをインストールと設定",
+ "published",
+ "指導書",
+ "article",
+ ],
]
self.assertEqual(sorted(articles_expected), sorted(self.articles))
def test_articles_draft(self):
draft_articles_expected = [
- ['Draft article', 'draft', 'Default', 'article'],
+ ["Draft article", "draft", "Default", "article"],
]
self.assertEqual(sorted(draft_articles_expected), sorted(self.drafts))
def test_articles_hidden(self):
hidden_articles_expected = [
- ['Hidden article', 'hidden', 'Default', 'article'],
+ ["Hidden article", "hidden", "Default", "article"],
]
self.assertEqual(sorted(hidden_articles_expected), sorted(self.hidden_articles))
@@ -301,27 +391,30 @@ def test_generate_categories(self):
# terms of process order will define the name for that category
categories = [cat.name for cat, _ in self.generator.categories]
categories_alternatives = (
- sorted(['Default', 'TestCategory', 'Yeah', 'test', '指導書']),
- sorted(['Default', 'TestCategory', 'yeah', 'test', '指導書']),
+ sorted(["Default", "TestCategory", "Yeah", "test", "指導書"]),
+ sorted(["Default", "TestCategory", "yeah", "test", "指導書"]),
)
self.assertIn(sorted(categories), categories_alternatives)
# test for slug
categories = [cat.slug for cat, _ in self.generator.categories]
- categories_expected = ['default', 'testcategory', 'yeah', 'test',
- 'zhi-dao-shu']
+ categories_expected = ["default", "testcategory", "yeah", "test", "zhi-dao-shu"]
self.assertEqual(sorted(categories), sorted(categories_expected))
def test_do_not_use_folder_as_category(self):
settings = get_settings()
- settings['DEFAULT_CATEGORY'] = 'Default'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['USE_FOLDER_AS_CATEGORY'] = False
- settings['CACHE_PATH'] = self.temp_cache
- settings['READERS'] = {'asc': None}
+ settings["DEFAULT_CATEGORY"] = "Default"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["USE_FOLDER_AS_CATEGORY"] = False
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["READERS"] = {"asc": None}
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
# test for name
# categories are grouped by slug; if two categories have the same slug
@@ -329,61 +422,79 @@ def test_do_not_use_folder_as_category(self):
# terms of process order will define the name for that category
categories = [cat.name for cat, _ in generator.categories]
categories_alternatives = (
- sorted(['Default', 'Yeah', 'test', '指導書']),
- sorted(['Default', 'yeah', 'test', '指導書']),
+ sorted(["Default", "Yeah", "test", "指導書"]),
+ sorted(["Default", "yeah", "test", "指導書"]),
)
self.assertIn(sorted(categories), categories_alternatives)
# test for slug
categories = [cat.slug for cat, _ in generator.categories]
- categories_expected = ['default', 'yeah', 'test', 'zhi-dao-shu']
+ categories_expected = ["default", "yeah", "test", "zhi-dao-shu"]
self.assertEqual(sorted(categories), sorted(categories_expected))
def test_direct_templates_save_as_url_default(self):
-
settings = get_settings()
- settings['CACHE_PATH'] = self.temp_cache
+ settings["CACHE_PATH"] = self.temp_cache
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
write = MagicMock()
generator.generate_direct_templates(write)
- write.assert_called_with("archives.html",
- generator.get_template("archives"), context,
- articles=generator.articles,
- dates=generator.dates, blog=True,
- template_name='archives',
- page_name='archives', url="archives.html")
+ write.assert_called_with(
+ "archives.html",
+ generator.get_template("archives"),
+ context,
+ articles=generator.articles,
+ dates=generator.dates,
+ blog=True,
+ template_name="archives",
+ page_name="archives",
+ url="archives.html",
+ )
def test_direct_templates_save_as_url_modified(self):
-
settings = get_settings()
- settings['DIRECT_TEMPLATES'] = ['archives']
- settings['ARCHIVES_SAVE_AS'] = 'archives/index.html'
- settings['ARCHIVES_URL'] = 'archives/'
- settings['CACHE_PATH'] = self.temp_cache
+ settings["DIRECT_TEMPLATES"] = ["archives"]
+ settings["ARCHIVES_SAVE_AS"] = "archives/index.html"
+ settings["ARCHIVES_URL"] = "archives/"
+ settings["CACHE_PATH"] = self.temp_cache
generator = ArticlesGenerator(
- context=settings, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=settings,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
write = MagicMock()
generator.generate_direct_templates(write)
- write.assert_called_with("archives/index.html",
- generator.get_template("archives"), settings,
- articles=generator.articles,
- dates=generator.dates, blog=True,
- template_name='archives',
- page_name='archives/index',
- url="archives/")
+ write.assert_called_with(
+ "archives/index.html",
+ generator.get_template("archives"),
+ settings,
+ articles=generator.articles,
+ dates=generator.dates,
+ blog=True,
+ template_name="archives",
+ page_name="archives/index",
+ url="archives/",
+ )
def test_direct_templates_save_as_false(self):
-
settings = get_settings()
- settings['DIRECT_TEMPLATES'] = ['archives']
- settings['ARCHIVES_SAVE_AS'] = False
- settings['CACHE_PATH'] = self.temp_cache
+ settings["DIRECT_TEMPLATES"] = ["archives"]
+ settings["ARCHIVES_SAVE_AS"] = False
+ settings["CACHE_PATH"] = self.temp_cache
generator = ArticlesGenerator(
- context=settings, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=settings,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
write = MagicMock()
generator.generate_direct_templates(write)
self.assertEqual(write.call_count, 0)
@@ -392,10 +503,13 @@ def test_per_article_template(self):
"""
Custom template articles get the field but standard/unset are None
"""
- custom_template = ['Article with template', 'published', 'Default',
- 'custom']
- standard_template = ['This is a super article !', 'published', 'Yeah',
- 'article']
+ custom_template = ["Article with template", "published", "Default", "custom"]
+ standard_template = [
+ "This is a super article !",
+ "published",
+ "Yeah",
+ "article",
+ ]
self.assertIn(custom_template, self.articles)
self.assertIn(standard_template, self.articles)
@@ -403,126 +517,135 @@ def test_period_archives_context(self):
"""Test correctness of the period_archives context values."""
settings = get_settings()
- settings['CACHE_PATH'] = self.temp_cache
+ settings["CACHE_PATH"] = self.temp_cache
# No period archives enabled:
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- period_archives = generator.context['period_archives']
+ period_archives = generator.context["period_archives"]
self.assertEqual(len(period_archives.items()), 0)
# Year archives enabled:
- settings['YEAR_ARCHIVE_SAVE_AS'] = 'posts/{date:%Y}/index.html'
- settings['YEAR_ARCHIVE_URL'] = 'posts/{date:%Y}/'
+ settings["YEAR_ARCHIVE_SAVE_AS"] = "posts/{date:%Y}/index.html"
+ settings["YEAR_ARCHIVE_URL"] = "posts/{date:%Y}/"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- period_archives = generator.context['period_archives']
+ period_archives = generator.context["period_archives"]
abbreviated_archives = {
- granularity: {period['period'] for period in periods}
+ granularity: {period["period"] for period in periods}
for granularity, periods in period_archives.items()
}
- expected = {'year': {(1970,), (2010,), (2012,), (2014,)}}
+ expected = {"year": {(1970,), (2010,), (2012,), (2014,)}}
self.assertEqual(expected, abbreviated_archives)
# Month archives enabled:
- settings['MONTH_ARCHIVE_SAVE_AS'] = \
- 'posts/{date:%Y}/{date:%b}/index.html'
- settings['MONTH_ARCHIVE_URL'] = \
- 'posts/{date:%Y}/{date:%b}/'
+ settings["MONTH_ARCHIVE_SAVE_AS"] = "posts/{date:%Y}/{date:%b}/index.html"
+ settings["MONTH_ARCHIVE_URL"] = "posts/{date:%Y}/{date:%b}/"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- period_archives = generator.context['period_archives']
+ period_archives = generator.context["period_archives"]
abbreviated_archives = {
- granularity: {period['period'] for period in periods}
+ granularity: {period["period"] for period in periods}
for granularity, periods in period_archives.items()
}
expected = {
- 'year': {(1970,), (2010,), (2012,), (2014,)},
- 'month': {
- (1970, 'January'),
- (2010, 'December'),
- (2012, 'December'),
- (2012, 'November'),
- (2012, 'October'),
- (2014, 'February'),
+ "year": {(1970,), (2010,), (2012,), (2014,)},
+ "month": {
+ (1970, "January"),
+ (2010, "December"),
+ (2012, "December"),
+ (2012, "November"),
+ (2012, "October"),
+ (2014, "February"),
},
}
self.assertEqual(expected, abbreviated_archives)
# Day archives enabled:
- settings['DAY_ARCHIVE_SAVE_AS'] = \
- 'posts/{date:%Y}/{date:%b}/{date:%d}/index.html'
- settings['DAY_ARCHIVE_URL'] = \
- 'posts/{date:%Y}/{date:%b}/{date:%d}/'
+ settings[
+ "DAY_ARCHIVE_SAVE_AS"
+ ] = "posts/{date:%Y}/{date:%b}/{date:%d}/index.html"
+ settings["DAY_ARCHIVE_URL"] = "posts/{date:%Y}/{date:%b}/{date:%d}/"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- period_archives = generator.context['period_archives']
+ period_archives = generator.context["period_archives"]
abbreviated_archives = {
- granularity: {period['period'] for period in periods}
+ granularity: {period["period"] for period in periods}
for granularity, periods in period_archives.items()
}
expected = {
- 'year': {(1970,), (2010,), (2012,), (2014,)},
- 'month': {
- (1970, 'January'),
- (2010, 'December'),
- (2012, 'December'),
- (2012, 'November'),
- (2012, 'October'),
- (2014, 'February'),
+ "year": {(1970,), (2010,), (2012,), (2014,)},
+ "month": {
+ (1970, "January"),
+ (2010, "December"),
+ (2012, "December"),
+ (2012, "November"),
+ (2012, "October"),
+ (2014, "February"),
},
- 'day': {
- (1970, 'January', 1),
- (2010, 'December', 2),
- (2012, 'December', 20),
- (2012, 'November', 29),
- (2012, 'October', 30),
- (2012, 'October', 31),
- (2014, 'February', 9),
+ "day": {
+ (1970, "January", 1),
+ (2010, "December", 2),
+ (2012, "December", 20),
+ (2012, "November", 29),
+ (2012, "October", 30),
+ (2012, "October", 31),
+ (2014, "February", 9),
},
}
self.assertEqual(expected, abbreviated_archives)
# Further item values tests
filtered_archives = [
- p for p in period_archives['day']
- if p['period'] == (2014, 'February', 9)
+ p for p in period_archives["day"] if p["period"] == (2014, "February", 9)
]
self.assertEqual(len(filtered_archives), 1)
sample_archive = filtered_archives[0]
- self.assertEqual(sample_archive['period_num'], (2014, 2, 9))
- self.assertEqual(
- sample_archive['save_as'], 'posts/2014/Feb/09/index.html')
- self.assertEqual(
- sample_archive['url'], 'posts/2014/Feb/09/')
+ self.assertEqual(sample_archive["period_num"], (2014, 2, 9))
+ self.assertEqual(sample_archive["save_as"], "posts/2014/Feb/09/index.html")
+ self.assertEqual(sample_archive["url"], "posts/2014/Feb/09/")
articles = [
- d for d in generator.articles if
- d.date.year == 2014 and
- d.date.month == 2 and
- d.date.day == 9
+ d
+ for d in generator.articles
+ if d.date.year == 2014 and d.date.month == 2 and d.date.day == 9
]
- self.assertEqual(len(sample_archive['articles']), len(articles))
+ self.assertEqual(len(sample_archive["articles"]), len(articles))
dates = [
- d for d in generator.dates if
- d.date.year == 2014 and
- d.date.month == 2 and
- d.date.day == 9
+ d
+ for d in generator.dates
+ if d.date.year == 2014 and d.date.month == 2 and d.date.day == 9
]
- self.assertEqual(len(sample_archive['dates']), len(dates))
- self.assertEqual(sample_archive['dates'][0].title, dates[0].title)
- self.assertEqual(sample_archive['dates'][0].date, dates[0].date)
+ self.assertEqual(len(sample_archive["dates"]), len(dates))
+ self.assertEqual(sample_archive["dates"][0].title, dates[0].title)
+ self.assertEqual(sample_archive["dates"][0].date, dates[0].date)
def test_period_in_timeperiod_archive(self):
"""
@@ -531,13 +654,17 @@ def test_period_in_timeperiod_archive(self):
"""
settings = get_settings()
- settings['YEAR_ARCHIVE_SAVE_AS'] = 'posts/{date:%Y}/index.html'
- settings['YEAR_ARCHIVE_URL'] = 'posts/{date:%Y}/'
- settings['CACHE_PATH'] = self.temp_cache
+ settings["YEAR_ARCHIVE_SAVE_AS"] = "posts/{date:%Y}/index.html"
+ settings["YEAR_ARCHIVE_URL"] = "posts/{date:%Y}/"
+ settings["CACHE_PATH"] = self.temp_cache
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
write = MagicMock()
generator.generate_period_archives(write)
@@ -547,196 +674,257 @@ def test_period_in_timeperiod_archive(self):
# among other things it must have at least been called with this
context["period"] = (1970,)
context["period_num"] = (1970,)
- write.assert_called_with("posts/1970/index.html",
- generator.get_template("period_archives"),
- context, blog=True, articles=articles,
- dates=dates, template_name='period_archives',
- url="posts/1970/",
- all_articles=generator.articles)
-
- settings['MONTH_ARCHIVE_SAVE_AS'] = \
- 'posts/{date:%Y}/{date:%b}/index.html'
- settings['MONTH_ARCHIVE_URL'] = \
- 'posts/{date:%Y}/{date:%b}/'
+ write.assert_called_with(
+ "posts/1970/index.html",
+ generator.get_template("period_archives"),
+ context,
+ blog=True,
+ articles=articles,
+ dates=dates,
+ template_name="period_archives",
+ url="posts/1970/",
+ all_articles=generator.articles,
+ )
+
+ settings["MONTH_ARCHIVE_SAVE_AS"] = "posts/{date:%Y}/{date:%b}/index.html"
+ settings["MONTH_ARCHIVE_URL"] = "posts/{date:%Y}/{date:%b}/"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
write = MagicMock()
generator.generate_period_archives(write)
- dates = [d for d in generator.dates
- if d.date.year == 1970 and d.date.month == 1]
- articles = [d for d in generator.articles
- if d.date.year == 1970 and d.date.month == 1]
+ dates = [
+ d for d in generator.dates if d.date.year == 1970 and d.date.month == 1
+ ]
+ articles = [
+ d for d in generator.articles if d.date.year == 1970 and d.date.month == 1
+ ]
self.assertEqual(len(dates), 1)
context["period"] = (1970, "January")
context["period_num"] = (1970, 1)
# among other things it must have at least been called with this
- write.assert_called_with("posts/1970/Jan/index.html",
- generator.get_template("period_archives"),
- context, blog=True, articles=articles,
- dates=dates, template_name='period_archives',
- url="posts/1970/Jan/",
- all_articles=generator.articles)
-
- settings['DAY_ARCHIVE_SAVE_AS'] = \
- 'posts/{date:%Y}/{date:%b}/{date:%d}/index.html'
- settings['DAY_ARCHIVE_URL'] = \
- 'posts/{date:%Y}/{date:%b}/{date:%d}/'
+ write.assert_called_with(
+ "posts/1970/Jan/index.html",
+ generator.get_template("period_archives"),
+ context,
+ blog=True,
+ articles=articles,
+ dates=dates,
+ template_name="period_archives",
+ url="posts/1970/Jan/",
+ all_articles=generator.articles,
+ )
+
+ settings[
+ "DAY_ARCHIVE_SAVE_AS"
+ ] = "posts/{date:%Y}/{date:%b}/{date:%d}/index.html"
+ settings["DAY_ARCHIVE_URL"] = "posts/{date:%Y}/{date:%b}/{date:%d}/"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
write = MagicMock()
generator.generate_period_archives(write)
dates = [
- d for d in generator.dates if
- d.date.year == 1970 and
- d.date.month == 1 and
- d.date.day == 1
+ d
+ for d in generator.dates
+ if d.date.year == 1970 and d.date.month == 1 and d.date.day == 1
]
articles = [
- d for d in generator.articles if
- d.date.year == 1970 and
- d.date.month == 1 and
- d.date.day == 1
+ d
+ for d in generator.articles
+ if d.date.year == 1970 and d.date.month == 1 and d.date.day == 1
]
self.assertEqual(len(dates), 1)
context["period"] = (1970, "January", 1)
context["period_num"] = (1970, 1, 1)
# among other things it must have at least been called with this
- write.assert_called_with("posts/1970/Jan/01/index.html",
- generator.get_template("period_archives"),
- context, blog=True, articles=articles,
- dates=dates, template_name='period_archives',
- url="posts/1970/Jan/01/",
- all_articles=generator.articles)
+ write.assert_called_with(
+ "posts/1970/Jan/01/index.html",
+ generator.get_template("period_archives"),
+ context,
+ blog=True,
+ articles=articles,
+ dates=dates,
+ template_name="period_archives",
+ url="posts/1970/Jan/01/",
+ all_articles=generator.articles,
+ )
def test_nonexistent_template(self):
"""Attempt to load a non-existent template"""
settings = get_settings()
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=None, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=None,
+ theme=settings["THEME"],
+ output_path=None,
+ )
self.assertRaises(Exception, generator.get_template, "not_a_template")
def test_generate_authors(self):
"""Check authors generation."""
authors = [author.name for author, _ in self.generator.authors]
authors_expected = sorted(
- ['Alexis Métaireau', 'Author, First', 'Author, Second',
- 'First Author', 'Second Author'])
+ [
+ "Alexis Métaireau",
+ "Author, First",
+ "Author, Second",
+ "First Author",
+ "Second Author",
+ ]
+ )
self.assertEqual(sorted(authors), authors_expected)
# test for slug
authors = [author.slug for author, _ in self.generator.authors]
- authors_expected = ['alexis-metaireau', 'author-first',
- 'author-second', 'first-author', 'second-author']
+ authors_expected = [
+ "alexis-metaireau",
+ "author-first",
+ "author-second",
+ "first-author",
+ "second-author",
+ ]
self.assertEqual(sorted(authors), sorted(authors_expected))
def test_standard_metadata_in_default_metadata(self):
settings = get_settings()
- settings['CACHE_CONTENT'] = False
- settings['DEFAULT_CATEGORY'] = 'Default'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['DEFAULT_METADATA'] = (('author', 'Blogger'),
- # category will be ignored in favor of
- # DEFAULT_CATEGORY
- ('category', 'Random'),
- ('tags', 'general, untagged'))
+ settings["CACHE_CONTENT"] = False
+ settings["DEFAULT_CATEGORY"] = "Default"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["DEFAULT_METADATA"] = (
+ ("author", "Blogger"),
+ # category will be ignored in favor of
+ # DEFAULT_CATEGORY
+ ("category", "Random"),
+ ("tags", "general, untagged"),
+ )
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
authors = sorted([author.name for author, _ in generator.authors])
- authors_expected = sorted(['Alexis Métaireau', 'Blogger',
- 'Author, First', 'Author, Second',
- 'First Author', 'Second Author'])
+ authors_expected = sorted(
+ [
+ "Alexis Métaireau",
+ "Blogger",
+ "Author, First",
+ "Author, Second",
+ "First Author",
+ "Second Author",
+ ]
+ )
self.assertEqual(authors, authors_expected)
- categories = sorted([category.name
- for category, _ in generator.categories])
+ categories = sorted([category.name for category, _ in generator.categories])
categories_expected = [
- sorted(['Default', 'TestCategory', 'yeah', 'test', '指導書']),
- sorted(['Default', 'TestCategory', 'Yeah', 'test', '指導書'])]
+ sorted(["Default", "TestCategory", "yeah", "test", "指導書"]),
+ sorted(["Default", "TestCategory", "Yeah", "test", "指導書"]),
+ ]
self.assertIn(categories, categories_expected)
tags = sorted([tag.name for tag in generator.tags])
- tags_expected = sorted(['bar', 'foo', 'foobar', 'general', 'untagged',
- 'パイソン', 'マック'])
+ tags_expected = sorted(
+ ["bar", "foo", "foobar", "general", "untagged", "パイソン", "マック"]
+ )
self.assertEqual(tags, tags_expected)
def test_article_order_by(self):
settings = get_settings()
- settings['DEFAULT_CATEGORY'] = 'Default'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['ARTICLE_ORDER_BY'] = 'title'
+ settings["DEFAULT_CATEGORY"] = "Default"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["ARTICLE_ORDER_BY"] = "title"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
expected = [
- 'An Article With Code Block To Test Typogrify Ignore',
- 'Article title',
- 'Article with Nonconformant HTML meta tags',
- 'Article with an inline SVG',
- 'Article with markdown and empty tags',
- 'Article with markdown and nested summary metadata',
- 'Article with markdown and summary metadata multi',
- 'Article with markdown and summary metadata single',
- 'Article with markdown containing footnotes',
- 'Article with template',
- 'Metadata tags as list!',
- 'One -, two --, three --- dashes!',
- 'One -, two --, three --- dashes!',
- 'Rst with filename metadata',
- 'Test Markdown extensions',
- 'Test markdown File',
- 'Test md File',
- 'Test mdown File',
- 'Test metadata duplicates',
- 'Test mkd File',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is a super article !',
- 'This is an article with category !',
- ('This is an article with multiple authors in lastname, '
- 'firstname format!'),
- 'This is an article with multiple authors in list format!',
- 'This is an article with multiple authors!',
- 'This is an article with multiple authors!',
- 'This is an article without category !',
- 'This is an article without category !',
- 'マックOS X 10.8でパイソンとVirtualenvをインストールと設定']
+ "An Article With Code Block To Test Typogrify Ignore",
+ "Article title",
+ "Article with Nonconformant HTML meta tags",
+ "Article with an inline SVG",
+ "Article with markdown and empty tags",
+ "Article with markdown and nested summary metadata",
+ "Article with markdown and summary metadata multi",
+ "Article with markdown and summary metadata single",
+ "Article with markdown containing footnotes",
+ "Article with template",
+ "Metadata tags as list!",
+ "One -, two --, three --- dashes!",
+ "One -, two --, three --- dashes!",
+ "Rst with filename metadata",
+ "Test Markdown extensions",
+ "Test markdown File",
+ "Test md File",
+ "Test mdown File",
+ "Test metadata duplicates",
+ "Test mkd File",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is a super article !",
+ "This is an article with category !",
+ (
+ "This is an article with multiple authors in lastname, "
+ "firstname format!"
+ ),
+ "This is an article with multiple authors in list format!",
+ "This is an article with multiple authors!",
+ "This is an article with multiple authors!",
+ "This is an article without category !",
+ "This is an article without category !",
+ "マックOS X 10.8でパイソンとVirtualenvをインストールと設定",
+ ]
articles = [article.title for article in generator.articles]
self.assertEqual(articles, expected)
# reversed title
settings = get_settings()
- settings['DEFAULT_CATEGORY'] = 'Default'
- settings['DEFAULT_DATE'] = (1970, 1, 1)
- settings['ARTICLE_ORDER_BY'] = 'reversed-title'
+ settings["DEFAULT_CATEGORY"] = "Default"
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
+ settings["ARTICLE_ORDER_BY"] = "reversed-title"
context = get_context(settings)
generator = ArticlesGenerator(
- context=context, settings=settings,
- path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CONTENT_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
articles = [article.title for article in generator.articles]
@@ -750,7 +938,7 @@ class TestPageGenerator(unittest.TestCase):
# to match expected
def setUp(self):
- self.temp_cache = mkdtemp(prefix='pelican_cache.')
+ self.temp_cache = mkdtemp(prefix="pelican_cache.")
def tearDown(self):
rmtree(self.temp_cache)
@@ -760,112 +948,125 @@ def distill_pages(self, pages):
def test_generate_context(self):
settings = get_settings()
- settings['CACHE_PATH'] = self.temp_cache
- settings['PAGE_PATHS'] = ['TestPages'] # relative to CUR_DIR
- settings['DEFAULT_DATE'] = (1970, 1, 1)
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["PAGE_PATHS"] = ["TestPages"] # relative to CUR_DIR
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
context = get_context(settings)
generator = PagesGenerator(
- context=context, settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
pages = self.distill_pages(generator.pages)
hidden_pages = self.distill_pages(generator.hidden_pages)
draft_pages = self.distill_pages(generator.draft_pages)
pages_expected = [
- ['This is a test page', 'published', 'page'],
- ['This is a markdown test page', 'published', 'page'],
- ['This is a test page with a preset template', 'published',
- 'custom'],
- ['Page with a bunch of links', 'published', 'page'],
- ['Page with static links', 'published', 'page'],
- ['A Page (Test) for sorting', 'published', 'page'],
+ ["This is a test page", "published", "page"],
+ ["This is a markdown test page", "published", "page"],
+ ["This is a test page with a preset template", "published", "custom"],
+ ["Page with a bunch of links", "published", "page"],
+ ["Page with static links", "published", "page"],
+ ["A Page (Test) for sorting", "published", "page"],
]
hidden_pages_expected = [
- ['This is a test hidden page', 'hidden', 'page'],
- ['This is a markdown test hidden page', 'hidden', 'page'],
- ['This is a test hidden page with a custom template', 'hidden',
- 'custom'],
+ ["This is a test hidden page", "hidden", "page"],
+ ["This is a markdown test hidden page", "hidden", "page"],
+ ["This is a test hidden page with a custom template", "hidden", "custom"],
]
draft_pages_expected = [
- ['This is a test draft page', 'draft', 'page'],
- ['This is a markdown test draft page', 'draft', 'page'],
- ['This is a test draft page with a custom template', 'draft',
- 'custom'],
+ ["This is a test draft page", "draft", "page"],
+ ["This is a markdown test draft page", "draft", "page"],
+ ["This is a test draft page with a custom template", "draft", "custom"],
]
self.assertEqual(sorted(pages_expected), sorted(pages))
self.assertEqual(
sorted(pages_expected),
- sorted(self.distill_pages(generator.context['pages'])))
+ sorted(self.distill_pages(generator.context["pages"])),
+ )
self.assertEqual(sorted(hidden_pages_expected), sorted(hidden_pages))
self.assertEqual(sorted(draft_pages_expected), sorted(draft_pages))
self.assertEqual(
sorted(hidden_pages_expected),
- sorted(self.distill_pages(generator.context['hidden_pages'])))
+ sorted(self.distill_pages(generator.context["hidden_pages"])),
+ )
self.assertEqual(
sorted(draft_pages_expected),
- sorted(self.distill_pages(generator.context['draft_pages'])))
+ sorted(self.distill_pages(generator.context["draft_pages"])),
+ )
def test_generate_sorted(self):
settings = get_settings()
- settings['PAGE_PATHS'] = ['TestPages'] # relative to CUR_DIR
- settings['CACHE_PATH'] = self.temp_cache
- settings['DEFAULT_DATE'] = (1970, 1, 1)
+ settings["PAGE_PATHS"] = ["TestPages"] # relative to CUR_DIR
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
context = get_context(settings)
# default sort (filename)
pages_expected_sorted_by_filename = [
- ['This is a test page', 'published', 'page'],
- ['This is a markdown test page', 'published', 'page'],
- ['A Page (Test) for sorting', 'published', 'page'],
- ['Page with a bunch of links', 'published', 'page'],
- ['Page with static links', 'published', 'page'],
- ['This is a test page with a preset template', 'published',
- 'custom'],
+ ["This is a test page", "published", "page"],
+ ["This is a markdown test page", "published", "page"],
+ ["A Page (Test) for sorting", "published", "page"],
+ ["Page with a bunch of links", "published", "page"],
+ ["Page with static links", "published", "page"],
+ ["This is a test page with a preset template", "published", "custom"],
]
generator = PagesGenerator(
- context=context, settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
pages = self.distill_pages(generator.pages)
self.assertEqual(pages_expected_sorted_by_filename, pages)
# sort by title
pages_expected_sorted_by_title = [
- ['A Page (Test) for sorting', 'published', 'page'],
- ['Page with a bunch of links', 'published', 'page'],
- ['Page with static links', 'published', 'page'],
- ['This is a markdown test page', 'published', 'page'],
- ['This is a test page', 'published', 'page'],
- ['This is a test page with a preset template', 'published',
- 'custom'],
+ ["A Page (Test) for sorting", "published", "page"],
+ ["Page with a bunch of links", "published", "page"],
+ ["Page with static links", "published", "page"],
+ ["This is a markdown test page", "published", "page"],
+ ["This is a test page", "published", "page"],
+ ["This is a test page with a preset template", "published", "custom"],
]
- settings['PAGE_ORDER_BY'] = 'title'
+ settings["PAGE_ORDER_BY"] = "title"
context = get_context(settings)
generator = PagesGenerator(
- context=context.copy(), settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context.copy(),
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
pages = self.distill_pages(generator.pages)
self.assertEqual(pages_expected_sorted_by_title, pages)
# sort by title reversed
pages_expected_sorted_by_title = [
- ['This is a test page with a preset template', 'published',
- 'custom'],
- ['This is a test page', 'published', 'page'],
- ['This is a markdown test page', 'published', 'page'],
- ['Page with static links', 'published', 'page'],
- ['Page with a bunch of links', 'published', 'page'],
- ['A Page (Test) for sorting', 'published', 'page'],
+ ["This is a test page with a preset template", "published", "custom"],
+ ["This is a test page", "published", "page"],
+ ["This is a markdown test page", "published", "page"],
+ ["Page with static links", "published", "page"],
+ ["Page with a bunch of links", "published", "page"],
+ ["A Page (Test) for sorting", "published", "page"],
]
- settings['PAGE_ORDER_BY'] = 'reversed-title'
+ settings["PAGE_ORDER_BY"] = "reversed-title"
context = get_context(settings)
generator = PagesGenerator(
- context=context, settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
pages = self.distill_pages(generator.pages)
self.assertEqual(pages_expected_sorted_by_title, pages)
@@ -876,18 +1077,22 @@ def test_tag_and_category_links_on_generated_pages(self):
are generated correctly on pages
"""
settings = get_settings()
- settings['PAGE_PATHS'] = ['TestPages'] # relative to CUR_DIR
- settings['CACHE_PATH'] = self.temp_cache
- settings['DEFAULT_DATE'] = (1970, 1, 1)
+ settings["PAGE_PATHS"] = ["TestPages"] # relative to CUR_DIR
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
context = get_context(settings)
generator = PagesGenerator(
- context=context, settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
pages_by_title = {p.title: p for p in generator.pages}
- test_content = pages_by_title['Page with a bunch of links'].content
+ test_content = pages_by_title["Page with a bunch of links"].content
self.assertIn('<a href="/category/yeah.html">', test_content)
self.assertIn('<a href="/tag/matsuku.html">', test_content)
@@ -897,80 +1102,80 @@ def test_static_and_attach_links_on_generated_pages(self):
are included in context['static_links']
"""
settings = get_settings()
- settings['PAGE_PATHS'] = ['TestPages/page_with_static_links.md']
- settings['CACHE_PATH'] = self.temp_cache
- settings['DEFAULT_DATE'] = (1970, 1, 1)
+ settings["PAGE_PATHS"] = ["TestPages/page_with_static_links.md"]
+ settings["CACHE_PATH"] = self.temp_cache
+ settings["DEFAULT_DATE"] = (1970, 1, 1)
context = get_context(settings)
generator = PagesGenerator(
- context=context, settings=settings,
- path=CUR_DIR, theme=settings['THEME'], output_path=None)
+ context=context,
+ settings=settings,
+ path=CUR_DIR,
+ theme=settings["THEME"],
+ output_path=None,
+ )
generator.generate_context()
- self.assertIn('pelican/tests/TestPages/image0.jpg',
- context['static_links'])
- self.assertIn('pelican/tests/TestPages/image1.jpg',
- context['static_links'])
+ self.assertIn("pelican/tests/TestPages/image0.jpg", context["static_links"])
+ self.assertIn("pelican/tests/TestPages/image1.jpg", context["static_links"])
class TestTemplatePagesGenerator(TestCaseWithCLocale):
-
TEMPLATE_CONTENT = "foo: {{ foo }}"
def setUp(self):
super().setUp()
- self.temp_content = mkdtemp(prefix='pelicantests.')
- self.temp_output = mkdtemp(prefix='pelicantests.')
+ self.temp_content = mkdtemp(prefix="pelicantests.")
+ self.temp_output = mkdtemp(prefix="pelicantests.")
def tearDown(self):
rmtree(self.temp_content)
rmtree(self.temp_output)
def test_generate_output(self):
-
settings = get_settings()
- settings['STATIC_PATHS'] = ['static']
- settings['TEMPLATE_PAGES'] = {
- 'template/source.html': 'generated/file.html'
- }
+ settings["STATIC_PATHS"] = ["static"]
+ settings["TEMPLATE_PAGES"] = {"template/source.html": "generated/file.html"}
generator = TemplatePagesGenerator(
- context={'foo': 'bar'}, settings=settings,
- path=self.temp_content, theme='', output_path=self.temp_output)
+ context={"foo": "bar"},
+ settings=settings,
+ path=self.temp_content,
+ theme="",
+ output_path=self.temp_output,
+ )
# create a dummy template file
- template_dir = os.path.join(self.temp_content, 'template')
- template_path = os.path.join(template_dir, 'source.html')
+ template_dir = os.path.join(self.temp_content, "template")
+ template_path = os.path.join(template_dir, "source.html")
os.makedirs(template_dir)
- with open(template_path, 'w') as template_file:
+ with open(template_path, "w") as template_file:
template_file.write(self.TEMPLATE_CONTENT)
writer = Writer(self.temp_output, settings=settings)
generator.generate_output(writer)
- output_path = os.path.join(self.temp_output, 'generated', 'file.html')
+ output_path = os.path.join(self.temp_output, "generated", "file.html")
# output file has been generated
self.assertTrue(os.path.exists(output_path))
# output content is correct
with open(output_path) as output_file:
- self.assertEqual(output_file.read(), 'foo: bar')
+ self.assertEqual(output_file.read(), "foo: bar")
class TestStaticGenerator(unittest.TestCase):
-
def setUp(self):
- self.content_path = os.path.join(CUR_DIR, 'mixed_content')
- self.temp_content = mkdtemp(prefix='testcontent.')
- self.temp_output = mkdtemp(prefix='testoutput.')
+ self.content_path = os.path.join(CUR_DIR, "mixed_content")
+ self.temp_content = mkdtemp(prefix="testcontent.")
+ self.temp_output = mkdtemp(prefix="testoutput.")
self.settings = get_settings()
- self.settings['PATH'] = self.temp_content
- self.settings['STATIC_PATHS'] = ["static"]
- self.settings['OUTPUT_PATH'] = self.temp_output
+ self.settings["PATH"] = self.temp_content
+ self.settings["STATIC_PATHS"] = ["static"]
+ self.settings["OUTPUT_PATH"] = self.temp_output
os.mkdir(os.path.join(self.temp_content, "static"))
- self.startfile = os.path.join(self.temp_content,
- "static", "staticfile")
+ self.startfile = os.path.join(self.temp_content, "static", "staticfile")
self.endfile = os.path.join(self.temp_output, "static", "staticfile")
self.generator = StaticGenerator(
context=get_context(),
@@ -978,7 +1183,7 @@ def setUp(self):
path=self.temp_content,
theme="",
output_path=self.temp_output,
- )
+ )
def tearDown(self):
rmtree(self.temp_content)
@@ -989,155 +1194,198 @@ def set_ancient_mtime(self, path, timestamp=1):
def test_theme_static_paths_dirs(self):
"""Test that StaticGenerator properly copies also files mentioned in
- TEMPLATE_STATIC_PATHS, not just directories."""
+ TEMPLATE_STATIC_PATHS, not just directories."""
settings = get_settings(PATH=self.content_path)
context = get_context(settings, staticfiles=[])
StaticGenerator(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_output(None)
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_output(None)
# The content of dirs listed in THEME_STATIC_PATHS (defaulting to
# "static") is put into the output
- self.assertTrue(os.path.isdir(os.path.join(self.temp_output,
- "theme/css/")))
- self.assertTrue(os.path.isdir(os.path.join(self.temp_output,
- "theme/fonts/")))
+ self.assertTrue(os.path.isdir(os.path.join(self.temp_output, "theme/css/")))
+ self.assertTrue(os.path.isdir(os.path.join(self.temp_output, "theme/fonts/")))
def test_theme_static_paths_files(self):
"""Test that StaticGenerator properly copies also files mentioned in
- TEMPLATE_STATIC_PATHS, not just directories."""
+ TEMPLATE_STATIC_PATHS, not just directories."""
settings = get_settings(
PATH=self.content_path,
- THEME_STATIC_PATHS=['static/css/fonts.css', 'static/fonts/'],)
+ THEME_STATIC_PATHS=["static/css/fonts.css", "static/fonts/"],
+ )
context = get_context(settings, staticfiles=[])
StaticGenerator(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_output(None)
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_output(None)
# Only the content of dirs and files listed in THEME_STATIC_PATHS are
# put into the output, not everything from static/
- self.assertFalse(os.path.isdir(os.path.join(self.temp_output,
- "theme/css/")))
- self.assertFalse(os.path.isdir(os.path.join(self.temp_output,
- "theme/fonts/")))
-
- self.assertTrue(os.path.isfile(os.path.join(
- self.temp_output, "theme/Yanone_Kaffeesatz_400.eot")))
- self.assertTrue(os.path.isfile(os.path.join(
- self.temp_output, "theme/Yanone_Kaffeesatz_400.svg")))
- self.assertTrue(os.path.isfile(os.path.join(
- self.temp_output, "theme/Yanone_Kaffeesatz_400.ttf")))
- self.assertTrue(os.path.isfile(os.path.join(
- self.temp_output, "theme/Yanone_Kaffeesatz_400.woff")))
- self.assertTrue(os.path.isfile(os.path.join(
- self.temp_output, "theme/Yanone_Kaffeesatz_400.woff2")))
- self.assertTrue(os.path.isfile(os.path.join(self.temp_output,
- "theme/font.css")))
- self.assertTrue(os.path.isfile(os.path.join(self.temp_output,
- "theme/fonts.css")))
+ self.assertFalse(os.path.isdir(os.path.join(self.temp_output, "theme/css/")))
+ self.assertFalse(os.path.isdir(os.path.join(self.temp_output, "theme/fonts/")))
+
+ self.assertTrue(
+ os.path.isfile(
+ os.path.join(self.temp_output, "theme/Yanone_Kaffeesatz_400.eot")
+ )
+ )
+ self.assertTrue(
+ os.path.isfile(
+ os.path.join(self.temp_output, "theme/Yanone_Kaffeesatz_400.svg")
+ )
+ )
+ self.assertTrue(
+ os.path.isfile(
+ os.path.join(self.temp_output, "theme/Yanone_Kaffeesatz_400.ttf")
+ )
+ )
+ self.assertTrue(
+ os.path.isfile(
+ os.path.join(self.temp_output, "theme/Yanone_Kaffeesatz_400.woff")
+ )
+ )
+ self.assertTrue(
+ os.path.isfile(
+ os.path.join(self.temp_output, "theme/Yanone_Kaffeesatz_400.woff2")
+ )
+ )
+ self.assertTrue(
+ os.path.isfile(os.path.join(self.temp_output, "theme/font.css"))
+ )
+ self.assertTrue(
+ os.path.isfile(os.path.join(self.temp_output, "theme/fonts.css"))
+ )
def test_static_excludes(self):
- """Test that StaticGenerator respects STATIC_EXCLUDES.
- """
+ """Test that StaticGenerator respects STATIC_EXCLUDES."""
settings = get_settings(
- STATIC_EXCLUDES=['subdir'],
+ STATIC_EXCLUDES=["subdir"],
PATH=self.content_path,
- STATIC_PATHS=[''],)
+ STATIC_PATHS=[""],
+ )
context = get_context(settings)
StaticGenerator(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_context()
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_context()
- staticnames = [os.path.basename(c.source_path)
- for c in context['staticfiles']]
+ staticnames = [os.path.basename(c.source_path) for c in context["staticfiles"]]
self.assertNotIn(
- 'subdir_fake_image.jpg', staticnames,
- "StaticGenerator processed a file in a STATIC_EXCLUDES directory")
+ "subdir_fake_image.jpg",
+ staticnames,
+ "StaticGenerator processed a file in a STATIC_EXCLUDES directory",
+ )
self.assertIn(
- 'fake_image.jpg', staticnames,
- "StaticGenerator skipped a file that it should have included")
+ "fake_image.jpg",
+ staticnames,
+ "StaticGenerator skipped a file that it should have included",
+ )
def test_static_exclude_sources(self):
- """Test that StaticGenerator respects STATIC_EXCLUDE_SOURCES.
- """
+ """Test that StaticGenerator respects STATIC_EXCLUDE_SOURCES."""
settings = get_settings(
STATIC_EXCLUDE_SOURCES=True,
PATH=self.content_path,
- PAGE_PATHS=[''],
- STATIC_PATHS=[''],
- CACHE_CONTENT=False,)
+ PAGE_PATHS=[""],
+ STATIC_PATHS=[""],
+ CACHE_CONTENT=False,
+ )
context = get_context(settings)
for generator_class in (PagesGenerator, StaticGenerator):
generator_class(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_context()
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_context()
- staticnames = [os.path.basename(c.source_path)
- for c in context['staticfiles']]
+ staticnames = [os.path.basename(c.source_path) for c in context["staticfiles"]]
self.assertFalse(
any(name.endswith(".md") for name in staticnames),
- "STATIC_EXCLUDE_SOURCES=True failed to exclude a markdown file")
+ "STATIC_EXCLUDE_SOURCES=True failed to exclude a markdown file",
+ )
settings.update(STATIC_EXCLUDE_SOURCES=False)
context = get_context(settings)
for generator_class in (PagesGenerator, StaticGenerator):
generator_class(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_context()
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_context()
- staticnames = [os.path.basename(c.source_path)
- for c in context['staticfiles']]
+ staticnames = [os.path.basename(c.source_path) for c in context["staticfiles"]]
self.assertTrue(
any(name.endswith(".md") for name in staticnames),
- "STATIC_EXCLUDE_SOURCES=False failed to include a markdown file")
+ "STATIC_EXCLUDE_SOURCES=False failed to include a markdown file",
+ )
def test_static_links(self):
- """Test that StaticGenerator uses files in static_links
- """
+ """Test that StaticGenerator uses files in static_links"""
settings = get_settings(
- STATIC_EXCLUDES=['subdir'],
+ STATIC_EXCLUDES=["subdir"],
PATH=self.content_path,
- STATIC_PATHS=[],)
+ STATIC_PATHS=[],
+ )
context = get_context(settings)
- context['static_links'] |= {'short_page.md', 'subdir_fake_image.jpg'}
+ context["static_links"] |= {"short_page.md", "subdir_fake_image.jpg"}
StaticGenerator(
- context=context, settings=settings,
- path=settings['PATH'], output_path=self.temp_output,
- theme=settings['THEME']).generate_context()
+ context=context,
+ settings=settings,
+ path=settings["PATH"],
+ output_path=self.temp_output,
+ theme=settings["THEME"],
+ ).generate_context()
staticfiles_names = [
- os.path.basename(c.source_path) for c in context['staticfiles']]
+ os.path.basename(c.source_path) for c in context["staticfiles"]
+ ]
- static_content_names = [
- os.path.basename(c) for c in context['static_content']]
+ static_content_names = [os.path.basename(c) for c in context["static_content"]]
self.assertIn(
- 'short_page.md', staticfiles_names,
- "StaticGenerator skipped a file that it should have included")
+ "short_page.md",
+ staticfiles_names,
+ "StaticGenerator skipped a file that it should have included",
+ )
self.assertIn(
- 'short_page.md', static_content_names,
- "StaticGenerator skipped a file that it should have included")
+ "short_page.md",
+ static_content_names,
+ "StaticGenerator skipped a file that it should have included",
+ )
self.assertIn(
- 'subdir_fake_image.jpg', staticfiles_names,
- "StaticGenerator skipped a file that it should have included")
+ "subdir_fake_image.jpg",
+ staticfiles_names,
+ "StaticGenerator skipped a file that it should have included",
+ )
self.assertIn(
- 'subdir_fake_image.jpg', static_content_names,
- "StaticGenerator skipped a file that it should have included")
+ "subdir_fake_image.jpg",
+ static_content_names,
+ "StaticGenerator skipped a file that it should have included",
+ )
def test_copy_one_file(self):
with open(self.startfile, "w") as f:
@@ -1160,7 +1408,7 @@ def test_dest_and_source_mtimes_are_equal(self):
staticfile = MagicMock()
staticfile.source_path = self.startfile
staticfile.save_as = self.endfile
- self.settings['STATIC_CHECK_IF_MODIFIED'] = True
+ self.settings["STATIC_CHECK_IF_MODIFIED"] = True
with open(staticfile.source_path, "w") as f:
f.write("a")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1181,7 +1429,7 @@ def test_source_is_newer(self):
self.assertTrue(isnewer)
def test_skip_file_when_source_is_not_newer(self):
- self.settings['STATIC_CHECK_IF_MODIFIED'] = True
+ self.settings["STATIC_CHECK_IF_MODIFIED"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1201,7 +1449,7 @@ def test_dont_link_by_default(self):
self.assertFalse(os.path.samefile(self.startfile, self.endfile))
def test_output_file_is_linked_to_source(self):
- self.settings['STATIC_CREATE_LINKS'] = True
+ self.settings["STATIC_CREATE_LINKS"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
self.generator.generate_context()
@@ -1209,7 +1457,7 @@ def test_output_file_is_linked_to_source(self):
self.assertTrue(os.path.samefile(self.startfile, self.endfile))
def test_output_file_exists_and_is_newer(self):
- self.settings['STATIC_CREATE_LINKS'] = True
+ self.settings["STATIC_CREATE_LINKS"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1219,9 +1467,9 @@ def test_output_file_exists_and_is_newer(self):
self.generator.generate_output(None)
self.assertTrue(os.path.samefile(self.startfile, self.endfile))
- @unittest.skipUnless(can_symlink(), 'No symlink privilege')
+ @unittest.skipUnless(can_symlink(), "No symlink privilege")
def test_can_symlink_when_hardlink_not_possible(self):
- self.settings['STATIC_CREATE_LINKS'] = True
+ self.settings["STATIC_CREATE_LINKS"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1230,9 +1478,9 @@ def test_can_symlink_when_hardlink_not_possible(self):
self.generator.generate_output(None)
self.assertTrue(os.path.islink(self.endfile))
- @unittest.skipUnless(can_symlink(), 'No symlink privilege')
+ @unittest.skipUnless(can_symlink(), "No symlink privilege")
def test_existing_symlink_is_considered_up_to_date(self):
- self.settings['STATIC_CREATE_LINKS'] = True
+ self.settings["STATIC_CREATE_LINKS"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1243,9 +1491,9 @@ def test_existing_symlink_is_considered_up_to_date(self):
requires_update = self.generator._file_update_required(staticfile)
self.assertFalse(requires_update)
- @unittest.skipUnless(can_symlink(), 'No symlink privilege')
+ @unittest.skipUnless(can_symlink(), "No symlink privilege")
def test_invalid_symlink_is_overwritten(self):
- self.settings['STATIC_CREATE_LINKS'] = True
+ self.settings["STATIC_CREATE_LINKS"] = True
with open(self.startfile, "w") as f:
f.write("staticcontent")
os.mkdir(os.path.join(self.temp_output, "static"))
@@ -1263,14 +1511,14 @@ def test_invalid_symlink_is_overwritten(self):
# os.path.realpath is broken on Windows before python3.8 for symlinks.
# This is a (ugly) workaround.
# see: https://bugs.python.org/issue9949
- if os.name == 'nt' and sys.version_info < (3, 8):
+ if os.name == "nt" and sys.version_info < (3, 8):
+
def get_real_path(path):
return os.readlink(path) if os.path.islink(path) else path
else:
get_real_path = os.path.realpath
- self.assertEqual(get_real_path(self.endfile),
- get_real_path(self.startfile))
+ self.assertEqual(get_real_path(self.endfile), get_real_path(self.startfile))
def test_delete_existing_file_before_mkdir(self):
with open(self.startfile, "w") as f:
@@ -1279,16 +1527,14 @@ def test_delete_existing_file_before_mkdir(self):
f.write("This file should be a directory")
self.generator.generate_context()
self.generator.generate_output(None)
- self.assertTrue(
- os.path.isdir(os.path.join(self.temp_output, "static")))
+ self.assertTrue(os.path.isdir(os.path.join(self.temp_output, "static")))
self.assertTrue(os.path.isfile(self.endfile))
class TestJinja2Environment(TestCaseWithCLocale):
-
def setUp(self):
- self.temp_content = mkdtemp(prefix='pelicantests.')
- self.temp_output = mkdtemp(prefix='pelicantests.')
+ self.temp_content = mkdtemp(prefix="pelicantests.")
+ self.temp_output = mkdtemp(prefix="pelicantests.")
def tearDown(self):
rmtree(self.temp_content)
@@ -1296,27 +1542,29 @@ def tearDown(self):
def _test_jinja2_helper(self, additional_settings, content, expected):
settings = get_settings()
- settings['STATIC_PATHS'] = ['static']
- settings['TEMPLATE_PAGES'] = {
- 'template/source.html': 'generated/file.html'
- }
+ settings["STATIC_PATHS"] = ["static"]
+ settings["TEMPLATE_PAGES"] = {"template/source.html": "generated/file.html"}
settings.update(additional_settings)
generator = TemplatePagesGenerator(
- context={'foo': 'foo', 'bar': 'bar'}, settings=settings,
- path=self.temp_content, theme='', output_path=self.temp_output)
+ context={"foo": "foo", "bar": "bar"},
+ settings=settings,
+ path=self.temp_content,
+ theme="",
+ output_path=self.temp_output,
+ )
# create a dummy template file
- template_dir = os.path.join(self.temp_content, 'template')
- template_path = os.path.join(template_dir, 'source.html')
+ template_dir = os.path.join(self.temp_content, "template")
+ template_path = os.path.join(template_dir, "source.html")
os.makedirs(template_dir)
- with open(template_path, 'w') as template_file:
+ with open(template_path, "w") as template_file:
template_file.write(content)
writer = Writer(self.temp_output, settings=settings)
generator.generate_output(writer)
- output_path = os.path.join(self.temp_output, 'generated', 'file.html')
+ output_path = os.path.join(self.temp_output, "generated", "file.html")
# output file has been generated
self.assertTrue(os.path.exists(output_path))
@@ -1327,32 +1575,32 @@ def _test_jinja2_helper(self, additional_settings, content, expected):
def test_jinja2_filter(self):
"""JINJA_FILTERS adds custom filters to Jinja2 environment"""
- content = 'foo: {{ foo|custom_filter }}, bar: {{ bar|custom_filter }}'
- settings = {'JINJA_FILTERS': {'custom_filter': lambda x: x.upper()}}
- expected = 'foo: FOO, bar: BAR'
+ content = "foo: {{ foo|custom_filter }}, bar: {{ bar|custom_filter }}"
+ settings = {"JINJA_FILTERS": {"custom_filter": lambda x: x.upper()}}
+ expected = "foo: FOO, bar: BAR"
self._test_jinja2_helper(settings, content, expected)
def test_jinja2_test(self):
"""JINJA_TESTS adds custom tests to Jinja2 environment"""
- content = 'foo {{ foo is custom_test }}, bar {{ bar is custom_test }}'
- settings = {'JINJA_TESTS': {'custom_test': lambda x: x == 'bar'}}
- expected = 'foo False, bar True'
+ content = "foo {{ foo is custom_test }}, bar {{ bar is custom_test }}"
+ settings = {"JINJA_TESTS": {"custom_test": lambda x: x == "bar"}}
+ expected = "foo False, bar True"
self._test_jinja2_helper(settings, content, expected)
def test_jinja2_global(self):
"""JINJA_GLOBALS adds custom globals to Jinja2 environment"""
- content = '{{ custom_global }}'
- settings = {'JINJA_GLOBALS': {'custom_global': 'foobar'}}
- expected = 'foobar'
+ content = "{{ custom_global }}"
+ settings = {"JINJA_GLOBALS": {"custom_global": "foobar"}}
+ expected = "foobar"
self._test_jinja2_helper(settings, content, expected)
def test_jinja2_extension(self):
"""JINJA_ENVIRONMENT adds extensions to Jinja2 environment"""
- content = '{% set stuff = [] %}{% do stuff.append(1) %}{{ stuff }}'
- settings = {'JINJA_ENVIRONMENT': {'extensions': ['jinja2.ext.do']}}
- expected = '[1]'
+ content = "{% set stuff = [] %}{% do stuff.append(1) %}{{ stuff }}"
+ settings = {"JINJA_ENVIRONMENT": {"extensions": ["jinja2.ext.do"]}}
+ expected = "[1]"
self._test_jinja2_helper(settings, content, expected)
diff --git a/pelican/tests/test_importer.py b/pelican/tests/test_importer.py
--- a/pelican/tests/test_importer.py
+++ b/pelican/tests/test_importer.py
@@ -4,26 +4,35 @@
from unittest.mock import patch
from pelican.settings import DEFAULT_CONFIG
-from pelican.tests.support import (mute, skipIfNoExecutable, temporary_folder,
- unittest, TestCaseWithCLocale)
-from pelican.tools.pelican_import import (blogger2fields, build_header,
- build_markdown_header,
- decode_wp_content,
- download_attachments, fields2pelican,
- get_attachments, tumblr2fields,
- wp2fields,
- )
+from pelican.tests.support import (
+ mute,
+ skipIfNoExecutable,
+ temporary_folder,
+ unittest,
+ TestCaseWithCLocale,
+)
+from pelican.tools.pelican_import import (
+ blogger2fields,
+ build_header,
+ build_markdown_header,
+ decode_wp_content,
+ download_attachments,
+ fields2pelican,
+ get_attachments,
+ tumblr2fields,
+ wp2fields,
+)
from pelican.utils import path_to_file_url, slugify
CUR_DIR = os.path.abspath(os.path.dirname(__file__))
-BLOGGER_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'bloggerexport.xml')
-WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'wordpressexport.xml')
-WORDPRESS_ENCODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
- 'content',
- 'wordpress_content_encoded')
-WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
- 'content',
- 'wordpress_content_decoded')
+BLOGGER_XML_SAMPLE = os.path.join(CUR_DIR, "content", "bloggerexport.xml")
+WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, "content", "wordpressexport.xml")
+WORDPRESS_ENCODED_CONTENT_SAMPLE = os.path.join(
+ CUR_DIR, "content", "wordpress_content_encoded"
+)
+WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(
+ CUR_DIR, "content", "wordpress_content_decoded"
+)
try:
from bs4 import BeautifulSoup
@@ -36,10 +45,9 @@
LXML = False
-@skipIfNoExecutable(['pandoc', '--version'])
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
+@skipIfNoExecutable(["pandoc", "--version"])
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
class TestBloggerXmlImporter(TestCaseWithCLocale):
-
def setUp(self):
super().setUp()
self.posts = blogger2fields(BLOGGER_XML_SAMPLE)
@@ -50,16 +58,17 @@ def test_recognise_kind_and_title(self):
"""
test_posts = list(self.posts)
kinds = {x[8] for x in test_posts}
- self.assertEqual({'page', 'article', 'comment'}, kinds)
- page_titles = {x[0] for x in test_posts if x[8] == 'page'}
- self.assertEqual({'Test page', 'Test page 2'}, page_titles)
- article_titles = {x[0] for x in test_posts if x[8] == 'article'}
- self.assertEqual({'Black as Egypt\'s Night', 'The Steel Windpipe'},
- article_titles)
- comment_titles = {x[0] for x in test_posts if x[8] == 'comment'}
- self.assertEqual({'Mishka, always a pleasure to read your '
- 'adventures!...'},
- comment_titles)
+ self.assertEqual({"page", "article", "comment"}, kinds)
+ page_titles = {x[0] for x in test_posts if x[8] == "page"}
+ self.assertEqual({"Test page", "Test page 2"}, page_titles)
+ article_titles = {x[0] for x in test_posts if x[8] == "article"}
+ self.assertEqual(
+ {"Black as Egypt's Night", "The Steel Windpipe"}, article_titles
+ )
+ comment_titles = {x[0] for x in test_posts if x[8] == "comment"}
+ self.assertEqual(
+ {"Mishka, always a pleasure to read your " "adventures!..."}, comment_titles
+ )
def test_recognise_status_with_correct_filename(self):
"""Check that importerer outputs only statuses 'published' and 'draft',
@@ -67,24 +76,25 @@ def test_recognise_status_with_correct_filename(self):
"""
test_posts = list(self.posts)
statuses = {x[7] for x in test_posts}
- self.assertEqual({'published', 'draft'}, statuses)
+ self.assertEqual({"published", "draft"}, statuses)
- draft_filenames = {x[2] for x in test_posts if x[7] == 'draft'}
+ draft_filenames = {x[2] for x in test_posts if x[7] == "draft"}
# draft filenames are id-based
- self.assertEqual({'page-4386962582497458967',
- 'post-1276418104709695660'}, draft_filenames)
+ self.assertEqual(
+ {"page-4386962582497458967", "post-1276418104709695660"}, draft_filenames
+ )
- published_filenames = {x[2] for x in test_posts if x[7] == 'published'}
+ published_filenames = {x[2] for x in test_posts if x[7] == "published"}
# published filenames are url-based, except comments
- self.assertEqual({'the-steel-windpipe',
- 'test-page',
- 'post-5590533389087749201'}, published_filenames)
+ self.assertEqual(
+ {"the-steel-windpipe", "test-page", "post-5590533389087749201"},
+ published_filenames,
+ )
-@skipIfNoExecutable(['pandoc', '--version'])
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
+@skipIfNoExecutable(["pandoc", "--version"])
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
class TestWordpressXmlImporter(TestCaseWithCLocale):
-
def setUp(self):
super().setUp()
self.posts = wp2fields(WORDPRESS_XML_SAMPLE)
@@ -92,30 +102,49 @@ def setUp(self):
def test_ignore_empty_posts(self):
self.assertTrue(self.posts)
- for (title, content, fname, date, author,
- categ, tags, status, kind, format) in self.posts:
+ for (
+ title,
+ content,
+ fname,
+ date,
+ author,
+ categ,
+ tags,
+ status,
+ kind,
+ format,
+ ) in self.posts:
self.assertTrue(title.strip())
def test_recognise_page_kind(self):
- """ Check that we recognise pages in wordpress, as opposed to posts """
+ """Check that we recognise pages in wordpress, as opposed to posts"""
self.assertTrue(self.posts)
# Collect (title, filename, kind) of non-empty posts recognised as page
pages_data = []
- for (title, content, fname, date, author,
- categ, tags, status, kind, format) in self.posts:
- if kind == 'page':
+ for (
+ title,
+ content,
+ fname,
+ date,
+ author,
+ categ,
+ tags,
+ status,
+ kind,
+ format,
+ ) in self.posts:
+ if kind == "page":
pages_data.append((title, fname))
self.assertEqual(2, len(pages_data))
- self.assertEqual(('Page', 'contact'), pages_data[0])
- self.assertEqual(('Empty Page', 'empty'), pages_data[1])
+ self.assertEqual(("Page", "contact"), pages_data[0])
+ self.assertEqual(("Empty Page", "empty"), pages_data[1])
def test_dirpage_directive_for_page_kind(self):
silent_f2p = mute(True)(fields2pelican)
test_post = filter(lambda p: p[0].startswith("Empty Page"), self.posts)
with temporary_folder() as temp:
- fname = list(silent_f2p(test_post, 'markdown',
- temp, dirpage=True))[0]
- self.assertTrue(fname.endswith('pages%sempty.md' % os.path.sep))
+ fname = list(silent_f2p(test_post, "markdown", temp, dirpage=True))[0]
+ self.assertTrue(fname.endswith("pages%sempty.md" % os.path.sep))
def test_dircat(self):
silent_f2p = mute(True)(fields2pelican)
@@ -125,14 +154,13 @@ def test_dircat(self):
if len(post[5]) > 0: # Has a category
test_posts.append(post)
with temporary_folder() as temp:
- fnames = list(silent_f2p(test_posts, 'markdown',
- temp, dircat=True))
- subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+ fnames = list(silent_f2p(test_posts, "markdown", temp, dircat=True))
+ subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
index = 0
for post in test_posts:
name = post[2]
category = slugify(post[5][0], regex_subs=subs, preserve_case=True)
- name += '.md'
+ name += ".md"
filename = os.path.join(category, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
@@ -141,9 +169,19 @@ def test_dircat(self):
def test_unless_custom_post_all_items_should_be_pages_or_posts(self):
self.assertTrue(self.posts)
pages_data = []
- for (title, content, fname, date, author, categ,
- tags, status, kind, format) in self.posts:
- if kind == 'page' or kind == 'article':
+ for (
+ title,
+ content,
+ fname,
+ date,
+ author,
+ categ,
+ tags,
+ status,
+ kind,
+ format,
+ ) in self.posts:
+ if kind == "page" or kind == "article":
pass
else:
pages_data.append((title, fname))
@@ -152,40 +190,45 @@ def test_unless_custom_post_all_items_should_be_pages_or_posts(self):
def test_recognise_custom_post_type(self):
self.assertTrue(self.custposts)
cust_data = []
- for (title, content, fname, date, author, categ,
- tags, status, kind, format) in self.custposts:
- if kind == 'article' or kind == 'page':
+ for (
+ title,
+ content,
+ fname,
+ date,
+ author,
+ categ,
+ tags,
+ status,
+ kind,
+ format,
+ ) in self.custposts:
+ if kind == "article" or kind == "page":
pass
else:
cust_data.append((title, kind))
self.assertEqual(3, len(cust_data))
+ self.assertEqual(("A custom post in category 4", "custom1"), cust_data[0])
+ self.assertEqual(("A custom post in category 5", "custom1"), cust_data[1])
self.assertEqual(
- ('A custom post in category 4', 'custom1'),
- cust_data[0])
- self.assertEqual(
- ('A custom post in category 5', 'custom1'),
- cust_data[1])
- self.assertEqual(
- ('A 2nd custom post type also in category 5', 'custom2'),
- cust_data[2])
+ ("A 2nd custom post type also in category 5", "custom2"), cust_data[2]
+ )
def test_custom_posts_put_in_own_dir(self):
silent_f2p = mute(True)(fields2pelican)
test_posts = []
for post in self.custposts:
# check post kind
- if post[8] == 'article' or post[8] == 'page':
+ if post[8] == "article" or post[8] == "page":
pass
else:
test_posts.append(post)
with temporary_folder() as temp:
- fnames = list(silent_f2p(test_posts, 'markdown',
- temp, wp_custpost=True))
+ fnames = list(silent_f2p(test_posts, "markdown", temp, wp_custpost=True))
index = 0
for post in test_posts:
name = post[2]
kind = post[8]
- name += '.md'
+ name += ".md"
filename = os.path.join(kind, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
@@ -196,20 +239,21 @@ def test_custom_posts_put_in_own_dir_and_catagory_sub_dir(self):
test_posts = []
for post in self.custposts:
# check post kind
- if post[8] == 'article' or post[8] == 'page':
+ if post[8] == "article" or post[8] == "page":
pass
else:
test_posts.append(post)
with temporary_folder() as temp:
- fnames = list(silent_f2p(test_posts, 'markdown', temp,
- wp_custpost=True, dircat=True))
- subs = DEFAULT_CONFIG['SLUG_REGEX_SUBSTITUTIONS']
+ fnames = list(
+ silent_f2p(test_posts, "markdown", temp, wp_custpost=True, dircat=True)
+ )
+ subs = DEFAULT_CONFIG["SLUG_REGEX_SUBSTITUTIONS"]
index = 0
for post in test_posts:
name = post[2]
kind = post[8]
category = slugify(post[5][0], regex_subs=subs, preserve_case=True)
- name += '.md'
+ name += ".md"
filename = os.path.join(kind, category, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
@@ -221,16 +265,19 @@ def test_wp_custpost_true_dirpage_false(self):
test_posts = []
for post in self.custposts:
# check post kind
- if post[8] == 'page':
+ if post[8] == "page":
test_posts.append(post)
with temporary_folder() as temp:
- fnames = list(silent_f2p(test_posts, 'markdown', temp,
- wp_custpost=True, dirpage=False))
+ fnames = list(
+ silent_f2p(
+ test_posts, "markdown", temp, wp_custpost=True, dirpage=False
+ )
+ )
index = 0
for post in test_posts:
name = post[2]
- name += '.md'
- filename = os.path.join('pages', name)
+ name += ".md"
+ filename = os.path.join("pages", name)
out_name = fnames[index]
self.assertFalse(out_name.endswith(filename))
@@ -238,117 +285,114 @@ def test_can_toggle_raw_html_code_parsing(self):
test_posts = list(self.posts)
def r(f):
- with open(f, encoding='utf-8') as infile:
+ with open(f, encoding="utf-8") as infile:
return infile.read()
+
silent_f2p = mute(True)(fields2pelican)
with temporary_folder() as temp:
-
- rst_files = (r(f) for f
- in silent_f2p(test_posts, 'markdown', temp))
- self.assertTrue(any('<iframe' in rst for rst in rst_files))
- rst_files = (r(f) for f
- in silent_f2p(test_posts, 'markdown',
- temp, strip_raw=True))
- self.assertFalse(any('<iframe' in rst for rst in rst_files))
+ rst_files = (r(f) for f in silent_f2p(test_posts, "markdown", temp))
+ self.assertTrue(any("<iframe" in rst for rst in rst_files))
+ rst_files = (
+ r(f) for f in silent_f2p(test_posts, "markdown", temp, strip_raw=True)
+ )
+ self.assertFalse(any("<iframe" in rst for rst in rst_files))
# no effect in rst
- rst_files = (r(f) for f in silent_f2p(test_posts, 'rst', temp))
- self.assertFalse(any('<iframe' in rst for rst in rst_files))
- rst_files = (r(f) for f in silent_f2p(test_posts, 'rst', temp,
- strip_raw=True))
- self.assertFalse(any('<iframe' in rst for rst in rst_files))
+ rst_files = (r(f) for f in silent_f2p(test_posts, "rst", temp))
+ self.assertFalse(any("<iframe" in rst for rst in rst_files))
+ rst_files = (
+ r(f) for f in silent_f2p(test_posts, "rst", temp, strip_raw=True)
+ )
+ self.assertFalse(any("<iframe" in rst for rst in rst_files))
def test_decode_html_entities_in_titles(self):
- test_posts = [post for post
- in self.posts if post[2] == 'html-entity-test']
+ test_posts = [post for post in self.posts if post[2] == "html-entity-test"]
self.assertEqual(len(test_posts), 1)
post = test_posts[0]
title = post[0]
- self.assertTrue(title, "A normal post with some <html> entities in "
- "the title. You can't miss them.")
- self.assertNotIn('&', title)
+ self.assertTrue(
+ title,
+ "A normal post with some <html> entities in "
+ "the title. You can't miss them.",
+ )
+ self.assertNotIn("&", title)
def test_decode_wp_content_returns_empty(self):
- """ Check that given an empty string we return an empty string."""
+ """Check that given an empty string we return an empty string."""
self.assertEqual(decode_wp_content(""), "")
def test_decode_wp_content(self):
- """ Check that we can decode a wordpress content string."""
+ """Check that we can decode a wordpress content string."""
with open(WORDPRESS_ENCODED_CONTENT_SAMPLE) as encoded_file:
encoded_content = encoded_file.read()
with open(WORDPRESS_DECODED_CONTENT_SAMPLE) as decoded_file:
decoded_content = decoded_file.read()
self.assertEqual(
- decode_wp_content(encoded_content, br=False),
- decoded_content)
+ decode_wp_content(encoded_content, br=False), decoded_content
+ )
def test_preserve_verbatim_formatting(self):
def r(f):
- with open(f, encoding='utf-8') as infile:
+ with open(f, encoding="utf-8") as infile:
return infile.read()
+
silent_f2p = mute(True)(fields2pelican)
- test_post = filter(
- lambda p: p[0].startswith("Code in List"),
- self.posts)
+ test_post = filter(lambda p: p[0].startswith("Code in List"), self.posts)
with temporary_folder() as temp:
- md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
- self.assertTrue(re.search(r'\s+a = \[1, 2, 3\]', md))
- self.assertTrue(re.search(r'\s+b = \[4, 5, 6\]', md))
+ md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
+ self.assertTrue(re.search(r"\s+a = \[1, 2, 3\]", md))
+ self.assertTrue(re.search(r"\s+b = \[4, 5, 6\]", md))
- for_line = re.search(r'\s+for i in zip\(a, b\):', md).group(0)
- print_line = re.search(r'\s+print i', md).group(0)
- self.assertTrue(
- for_line.rindex('for') < print_line.rindex('print'))
+ for_line = re.search(r"\s+for i in zip\(a, b\):", md).group(0)
+ print_line = re.search(r"\s+print i", md).group(0)
+ self.assertTrue(for_line.rindex("for") < print_line.rindex("print"))
def test_code_in_list(self):
def r(f):
- with open(f, encoding='utf-8') as infile:
+ with open(f, encoding="utf-8") as infile:
return infile.read()
+
silent_f2p = mute(True)(fields2pelican)
- test_post = filter(
- lambda p: p[0].startswith("Code in List"),
- self.posts)
+ test_post = filter(lambda p: p[0].startswith("Code in List"), self.posts)
with temporary_folder() as temp:
- md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
- sample_line = re.search(r'- This is a code sample', md).group(0)
- code_line = re.search(r'\s+a = \[1, 2, 3\]', md).group(0)
- self.assertTrue(sample_line.rindex('This') < code_line.rindex('a'))
+ md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
+ sample_line = re.search(r"- This is a code sample", md).group(0)
+ code_line = re.search(r"\s+a = \[1, 2, 3\]", md).group(0)
+ self.assertTrue(sample_line.rindex("This") < code_line.rindex("a"))
def test_dont_use_smart_quotes(self):
def r(f):
- with open(f, encoding='utf-8') as infile:
+ with open(f, encoding="utf-8") as infile:
return infile.read()
+
silent_f2p = mute(True)(fields2pelican)
- test_post = filter(
- lambda p: p[0].startswith("Post with raw data"),
- self.posts)
+ test_post = filter(lambda p: p[0].startswith("Post with raw data"), self.posts)
with temporary_folder() as temp:
- md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
+ md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
escaped_quotes = re.search(r'\\[\'"“”‘’]', md)
self.assertFalse(escaped_quotes)
def test_convert_caption_to_figure(self):
def r(f):
- with open(f, encoding='utf-8') as infile:
+ with open(f, encoding="utf-8") as infile:
return infile.read()
+
silent_f2p = mute(True)(fields2pelican)
- test_post = filter(
- lambda p: p[0].startswith("Caption on image"),
- self.posts)
+ test_post = filter(lambda p: p[0].startswith("Caption on image"), self.posts)
with temporary_folder() as temp:
- md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
+ md = [r(f) for f in silent_f2p(test_post, "markdown", temp)][0]
- caption = re.search(r'\[caption', md)
+ caption = re.search(r"\[caption", md)
self.assertFalse(caption)
for occurence in [
- '/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png',
- '/theme/img/xpelican-3.png.pagespeed.ic.m-NAIdRCOM.png',
- '/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png',
- 'This is a pelican',
- 'This also a pelican',
- 'Yet another pelican',
+ "/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png",
+ "/theme/img/xpelican-3.png.pagespeed.ic.m-NAIdRCOM.png",
+ "/theme/img/xpelican.png.pagespeed.ic.Rjep0025-y.png",
+ "This is a pelican",
+ "This also a pelican",
+ "Yet another pelican",
]:
# pandoc 2.x converts into ![text](src)
# pandoc 3.x converts into <figure>src<figcaption>text</figcaption></figure>
@@ -357,70 +401,97 @@ def r(f):
class TestBuildHeader(unittest.TestCase):
def test_build_header(self):
- header = build_header('test', None, None, None, None, None)
- self.assertEqual(header, 'test\n####\n\n')
+ header = build_header("test", None, None, None, None, None)
+ self.assertEqual(header, "test\n####\n\n")
def test_build_header_with_fields(self):
header_data = [
- 'Test Post',
- '2014-11-04',
- 'Alexis Métaireau',
- ['Programming'],
- ['Pelican', 'Python'],
- 'test-post',
+ "Test Post",
+ "2014-11-04",
+ "Alexis Métaireau",
+ ["Programming"],
+ ["Pelican", "Python"],
+ "test-post",
]
- expected_docutils = '\n'.join([
- 'Test Post',
- '#########',
- ':date: 2014-11-04',
- ':author: Alexis Métaireau',
- ':category: Programming',
- ':tags: Pelican, Python',
- ':slug: test-post',
- '\n',
- ])
-
- expected_md = '\n'.join([
- 'Title: Test Post',
- 'Date: 2014-11-04',
- 'Author: Alexis Métaireau',
- 'Category: Programming',
- 'Tags: Pelican, Python',
- 'Slug: test-post',
- '\n',
- ])
+ expected_docutils = "\n".join(
+ [
+ "Test Post",
+ "#########",
+ ":date: 2014-11-04",
+ ":author: Alexis Métaireau",
+ ":category: Programming",
+ ":tags: Pelican, Python",
+ ":slug: test-post",
+ "\n",
+ ]
+ )
+
+ expected_md = "\n".join(
+ [
+ "Title: Test Post",
+ "Date: 2014-11-04",
+ "Author: Alexis Métaireau",
+ "Category: Programming",
+ "Tags: Pelican, Python",
+ "Slug: test-post",
+ "\n",
+ ]
+ )
self.assertEqual(build_header(*header_data), expected_docutils)
self.assertEqual(build_markdown_header(*header_data), expected_md)
def test_build_header_with_east_asian_characters(self):
- header = build_header('これは広い幅の文字だけで構成されたタイトルです',
- None, None, None, None, None)
+ header = build_header(
+ "これは広い幅の文字だけで構成されたタイトルです",
+ None,
+ None,
+ None,
+ None,
+ None,
+ )
- self.assertEqual(header,
- ('これは広い幅の文字だけで構成されたタイトルです\n'
- '##############################################'
- '\n\n'))
+ self.assertEqual(
+ header,
+ (
+ "これは広い幅の文字だけで構成されたタイトルです\n"
+ "##############################################"
+ "\n\n"
+ ),
+ )
def test_galleries_added_to_header(self):
- header = build_header('test', None, None, None, None, None,
- attachments=['output/test1', 'output/test2'])
- self.assertEqual(header, ('test\n####\n'
- ':attachments: output/test1, '
- 'output/test2\n\n'))
+ header = build_header(
+ "test",
+ None,
+ None,
+ None,
+ None,
+ None,
+ attachments=["output/test1", "output/test2"],
+ )
+ self.assertEqual(
+ header, ("test\n####\n" ":attachments: output/test1, " "output/test2\n\n")
+ )
def test_galleries_added_to_markdown_header(self):
- header = build_markdown_header('test', None, None, None, None, None,
- attachments=['output/test1',
- 'output/test2'])
+ header = build_markdown_header(
+ "test",
+ None,
+ None,
+ None,
+ None,
+ None,
+ attachments=["output/test1", "output/test2"],
+ )
self.assertEqual(
- header,
- 'Title: test\nAttachments: output/test1, output/test2\n\n')
+ header, "Title: test\nAttachments: output/test1, output/test2\n\n"
+ )
-@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
-@unittest.skipUnless(LXML, 'Needs lxml module')
+@unittest.skipUnless(BeautifulSoup, "Needs BeautifulSoup module")
+@unittest.skipUnless(LXML, "Needs lxml module")
class TestWordpressXMLAttachements(TestCaseWithCLocale):
def setUp(self):
super().setUp()
@@ -435,38 +506,45 @@ def test_attachments_associated_with_correct_post(self):
for post in self.attachments.keys():
if post is None:
expected = {
- ('https://upload.wikimedia.org/wikipedia/commons/'
- 'thumb/2/2c/Pelican_lakes_entrance02.jpg/'
- '240px-Pelican_lakes_entrance02.jpg')
+ (
+ "https://upload.wikimedia.org/wikipedia/commons/"
+ "thumb/2/2c/Pelican_lakes_entrance02.jpg/"
+ "240px-Pelican_lakes_entrance02.jpg"
+ )
}
self.assertEqual(self.attachments[post], expected)
- elif post == 'with-excerpt':
- expected_invalid = ('http://thisurlisinvalid.notarealdomain/'
- 'not_an_image.jpg')
- expected_pelikan = ('http://en.wikipedia.org/wiki/'
- 'File:Pelikan_Walvis_Bay.jpg')
- self.assertEqual(self.attachments[post],
- {expected_invalid, expected_pelikan})
- elif post == 'with-tags':
- expected_invalid = ('http://thisurlisinvalid.notarealdomain')
+ elif post == "with-excerpt":
+ expected_invalid = (
+ "http://thisurlisinvalid.notarealdomain/" "not_an_image.jpg"
+ )
+ expected_pelikan = (
+ "http://en.wikipedia.org/wiki/" "File:Pelikan_Walvis_Bay.jpg"
+ )
+ self.assertEqual(
+ self.attachments[post], {expected_invalid, expected_pelikan}
+ )
+ elif post == "with-tags":
+ expected_invalid = "http://thisurlisinvalid.notarealdomain"
self.assertEqual(self.attachments[post], {expected_invalid})
else:
- self.fail('all attachments should match to a '
- 'filename or None, {}'
- .format(post))
+ self.fail(
+ "all attachments should match to a " "filename or None, {}".format(
+ post
+ )
+ )
def test_download_attachments(self):
- real_file = os.path.join(CUR_DIR, 'content/article.rst')
+ real_file = os.path.join(CUR_DIR, "content/article.rst")
good_url = path_to_file_url(real_file)
- bad_url = 'http://localhost:1/not_a_file.txt'
+ bad_url = "http://localhost:1/not_a_file.txt"
silent_da = mute()(download_attachments)
with temporary_folder() as temp:
locations = list(silent_da(temp, [good_url, bad_url]))
self.assertEqual(1, len(locations))
directory = locations[0]
self.assertTrue(
- directory.endswith(posix_join('content', 'article.rst')),
- directory)
+ directory.endswith(posix_join("content", "article.rst")), directory
+ )
class TestTumblrImporter(TestCaseWithCLocale):
@@ -484,32 +562,42 @@ def get_posts(api_key, blogname, offset=0):
"timestamp": 1573162000,
"format": "html",
"slug": "a-slug",
- "tags": [
- "economics"
- ],
+ "tags": ["economics"],
"state": "published",
-
"photos": [
{
"caption": "",
"original_size": {
"url": "https://..fccdc2360ba7182a.jpg",
"width": 634,
- "height": 789
+ "height": 789,
},
- }]
+ }
+ ],
}
]
+
get.side_effect = get_posts
posts = list(tumblr2fields("api_key", "blogname"))
self.assertEqual(
- [('Photo',
- '<img alt="" src="https://..fccdc2360ba7182a.jpg" />\n',
- '2019-11-07-a-slug', '2019-11-07 21:26:40+0000', 'testy', ['photo'],
- ['economics'], 'published', 'article', 'html')],
+ [
+ (
+ "Photo",
+ '<img alt="" src="https://..fccdc2360ba7182a.jpg" />\n',
+ "2019-11-07-a-slug",
+ "2019-11-07 21:26:40+0000",
+ "testy",
+ ["photo"],
+ ["economics"],
+ "published",
+ "article",
+ "html",
+ )
+ ],
+ posts,
posts,
- posts)
+ )
@patch("pelican.tools.pelican_import._get_tumblr_posts")
def test_video_embed(self, get):
@@ -531,40 +619,39 @@ def get_posts(api_key, blogname, offset=0):
"source_title": "youtube.com",
"caption": "<p>Caption</p>",
"player": [
- {
- "width": 250,
- "embed_code":
- "<iframe>1</iframe>"
- },
- {
- "width": 400,
- "embed_code":
- "<iframe>2</iframe>"
- },
- {
- "width": 500,
- "embed_code":
- "<iframe>3</iframe>"
- }
+ {"width": 250, "embed_code": "<iframe>1</iframe>"},
+ {"width": 400, "embed_code": "<iframe>2</iframe>"},
+ {"width": 500, "embed_code": "<iframe>3</iframe>"},
],
"video_type": "youtube",
}
- ]
+ ]
+
get.side_effect = get_posts
posts = list(tumblr2fields("api_key", "blogname"))
self.assertEqual(
- [('youtube.com',
- '<p><a href="https://href.li/?'
- 'https://www.youtube.com/a">via</a></p>\n<p>Caption</p>'
- '<iframe>1</iframe>\n'
- '<iframe>2</iframe>\n'
- '<iframe>3</iframe>\n',
- '2017-07-07-the-slug',
- '2017-07-07 20:31:41+0000', 'testy', ['video'], [], 'published',
- 'article', 'html')],
+ [
+ (
+ "youtube.com",
+ '<p><a href="https://href.li/?'
+ 'https://www.youtube.com/a">via</a></p>\n<p>Caption</p>'
+ "<iframe>1</iframe>\n"
+ "<iframe>2</iframe>\n"
+ "<iframe>3</iframe>\n",
+ "2017-07-07-the-slug",
+ "2017-07-07 20:31:41+0000",
+ "testy",
+ ["video"],
+ [],
+ "published",
+ "article",
+ "html",
+ )
+ ],
+ posts,
posts,
- posts)
+ )
@patch("pelican.tools.pelican_import._get_tumblr_posts")
def test_broken_video_embed(self, get):
@@ -581,42 +668,43 @@ def get_posts(api_key, blogname, offset=0):
"timestamp": 1471192655,
"state": "published",
"format": "html",
- "tags": [
- "interviews"
- ],
- "source_url":
- "https://href.li/?https://www.youtube.com/watch?v=b",
+ "tags": ["interviews"],
+ "source_url": "https://href.li/?https://www.youtube.com/watch?v=b",
"source_title": "youtube.com",
- "caption":
- "<p>Caption</p>",
+ "caption": "<p>Caption</p>",
"player": [
{
"width": 250,
# If video is gone, embed_code is False
- "embed_code": False
+ "embed_code": False,
},
- {
- "width": 400,
- "embed_code": False
- },
- {
- "width": 500,
- "embed_code": False
- }
+ {"width": 400, "embed_code": False},
+ {"width": 500, "embed_code": False},
],
"video_type": "youtube",
}
]
+
get.side_effect = get_posts
posts = list(tumblr2fields("api_key", "blogname"))
self.assertEqual(
- [('youtube.com',
- '<p><a href="https://href.li/?https://www.youtube.com/watch?'
- 'v=b">via</a></p>\n<p>Caption</p>'
- '<p>(This video isn\'t available anymore.)</p>\n',
- '2016-08-14-the-slug',
- '2016-08-14 16:37:35+0000', 'testy', ['video'], ['interviews'],
- 'published', 'article', 'html')],
+ [
+ (
+ "youtube.com",
+ '<p><a href="https://href.li/?https://www.youtube.com/watch?'
+ 'v=b">via</a></p>\n<p>Caption</p>'
+ "<p>(This video isn't available anymore.)</p>\n",
+ "2016-08-14-the-slug",
+ "2016-08-14 16:37:35+0000",
+ "testy",
+ ["video"],
+ ["interviews"],
+ "published",
+ "article",
+ "html",
+ )
+ ],
+ posts,
posts,
- posts)
+ )
diff --git a/pelican/tests/test_log.py b/pelican/tests/test_log.py
--- a/pelican/tests/test_log.py
+++ b/pelican/tests/test_log.py
@@ -35,48 +35,41 @@ def reset_logger(self):
def test_log_filter(self):
def do_logging():
for i in range(5):
- self.logger.warning('Log %s', i)
- self.logger.warning('Another log %s', i)
+ self.logger.warning("Log %s", i)
+ self.logger.warning("Another log %s", i)
+
# no filter
with self.reset_logger():
do_logging()
+ self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 5)
self.assertEqual(
- self.handler.count_logs('Log \\d', logging.WARNING),
- 5)
- self.assertEqual(
- self.handler.count_logs('Another log \\d', logging.WARNING),
- 5)
+ self.handler.count_logs("Another log \\d", logging.WARNING), 5
+ )
# filter by template
with self.reset_logger():
- log.LimitFilter._ignore.add((logging.WARNING, 'Log %s'))
+ log.LimitFilter._ignore.add((logging.WARNING, "Log %s"))
do_logging()
+ self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 0)
self.assertEqual(
- self.handler.count_logs('Log \\d', logging.WARNING),
- 0)
- self.assertEqual(
- self.handler.count_logs('Another log \\d', logging.WARNING),
- 5)
+ self.handler.count_logs("Another log \\d", logging.WARNING), 5
+ )
# filter by exact message
with self.reset_logger():
- log.LimitFilter._ignore.add((logging.WARNING, 'Log 3'))
+ log.LimitFilter._ignore.add((logging.WARNING, "Log 3"))
do_logging()
+ self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 4)
self.assertEqual(
- self.handler.count_logs('Log \\d', logging.WARNING),
- 4)
- self.assertEqual(
- self.handler.count_logs('Another log \\d', logging.WARNING),
- 5)
+ self.handler.count_logs("Another log \\d", logging.WARNING), 5
+ )
# filter by both
with self.reset_logger():
- log.LimitFilter._ignore.add((logging.WARNING, 'Log 3'))
- log.LimitFilter._ignore.add((logging.WARNING, 'Another log %s'))
+ log.LimitFilter._ignore.add((logging.WARNING, "Log 3"))
+ log.LimitFilter._ignore.add((logging.WARNING, "Another log %s"))
do_logging()
+ self.assertEqual(self.handler.count_logs("Log \\d", logging.WARNING), 4)
self.assertEqual(
- self.handler.count_logs('Log \\d', logging.WARNING),
- 4)
- self.assertEqual(
- self.handler.count_logs('Another log \\d', logging.WARNING),
- 0)
+ self.handler.count_logs("Another log \\d", logging.WARNING), 0
+ )
diff --git a/pelican/tests/test_paginator.py b/pelican/tests/test_paginator.py
--- a/pelican/tests/test_paginator.py
+++ b/pelican/tests/test_paginator.py
@@ -17,17 +17,17 @@ class TestPage(unittest.TestCase):
def setUp(self):
super().setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
self.page_kwargs = {
- 'content': TEST_CONTENT,
- 'context': {
- 'localsiteurl': '',
+ "content": TEST_CONTENT,
+ "context": {
+ "localsiteurl": "",
},
- 'metadata': {
- 'summary': TEST_SUMMARY,
- 'title': 'foo bar',
+ "metadata": {
+ "summary": TEST_SUMMARY,
+ "title": "foo bar",
},
- 'source_path': '/path/to/file/foo.ext'
+ "source_path": "/path/to/file/foo.ext",
}
def tearDown(self):
@@ -37,68 +37,79 @@ def test_save_as_preservation(self):
settings = get_settings()
# fix up pagination rules
from pelican.paginator import PaginationRule
+
pagination_rules = [
- PaginationRule(*r) for r in settings.get(
- 'PAGINATION_PATTERNS',
- DEFAULT_CONFIG['PAGINATION_PATTERNS'],
+ PaginationRule(*r)
+ for r in settings.get(
+ "PAGINATION_PATTERNS",
+ DEFAULT_CONFIG["PAGINATION_PATTERNS"],
)
]
- settings['PAGINATION_PATTERNS'] = sorted(
+ settings["PAGINATION_PATTERNS"] = sorted(
pagination_rules,
key=lambda r: r[0],
)
- self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
- object_list = [Article(**self.page_kwargs),
- Article(**self.page_kwargs)]
- paginator = Paginator('foobar.foo', 'foobar/foo', object_list,
- settings)
+ self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
+ object_list = [Article(**self.page_kwargs), Article(**self.page_kwargs)]
+ paginator = Paginator("foobar.foo", "foobar/foo", object_list, settings)
page = paginator.page(1)
- self.assertEqual(page.save_as, 'foobar.foo')
+ self.assertEqual(page.save_as, "foobar.foo")
def test_custom_pagination_pattern(self):
from pelican.paginator import PaginationRule
+
settings = get_settings()
- settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
- (1, '/{url}', '{base_name}/index.html'),
- (2, '/{url}{number}/', '{base_name}/{number}/index.html')
- ]]
-
- self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
- object_list = [Article(**self.page_kwargs),
- Article(**self.page_kwargs)]
- paginator = Paginator('blog/index.html', '//blog.my.site/',
- object_list, settings, 1)
+ settings["PAGINATION_PATTERNS"] = [
+ PaginationRule(*r)
+ for r in [
+ (1, "/{url}", "{base_name}/index.html"),
+ (2, "/{url}{number}/", "{base_name}/{number}/index.html"),
+ ]
+ ]
+
+ self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
+ object_list = [Article(**self.page_kwargs), Article(**self.page_kwargs)]
+ paginator = Paginator(
+ "blog/index.html", "//blog.my.site/", object_list, settings, 1
+ )
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
- self.assertEqual(page1.save_as, 'blog/index.html')
- self.assertEqual(page1.url, '//blog.my.site/')
+ self.assertEqual(page1.save_as, "blog/index.html")
+ self.assertEqual(page1.url, "//blog.my.site/")
page2 = paginator.page(2)
- self.assertEqual(page2.save_as, 'blog/2/index.html')
- self.assertEqual(page2.url, '//blog.my.site/2/')
+ self.assertEqual(page2.save_as, "blog/2/index.html")
+ self.assertEqual(page2.url, "//blog.my.site/2/")
def test_custom_pagination_pattern_last_page(self):
from pelican.paginator import PaginationRule
+
settings = get_settings()
- settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
- (1, '/{url}1/', '{base_name}/1/index.html'),
- (2, '/{url}{number}/', '{base_name}/{number}/index.html'),
- (-1, '/{url}', '{base_name}/index.html'),
- ]]
-
- self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
- object_list = [Article(**self.page_kwargs),
- Article(**self.page_kwargs),
- Article(**self.page_kwargs)]
- paginator = Paginator('blog/index.html', '//blog.my.site/',
- object_list, settings, 1)
+ settings["PAGINATION_PATTERNS"] = [
+ PaginationRule(*r)
+ for r in [
+ (1, "/{url}1/", "{base_name}/1/index.html"),
+ (2, "/{url}{number}/", "{base_name}/{number}/index.html"),
+ (-1, "/{url}", "{base_name}/index.html"),
+ ]
+ ]
+
+ self.page_kwargs["metadata"]["author"] = Author("Blogger", settings)
+ object_list = [
+ Article(**self.page_kwargs),
+ Article(**self.page_kwargs),
+ Article(**self.page_kwargs),
+ ]
+ paginator = Paginator(
+ "blog/index.html", "//blog.my.site/", object_list, settings, 1
+ )
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
- self.assertEqual(page1.save_as, 'blog/1/index.html')
- self.assertEqual(page1.url, '//blog.my.site/1/')
+ self.assertEqual(page1.save_as, "blog/1/index.html")
+ self.assertEqual(page1.url, "//blog.my.site/1/")
page2 = paginator.page(2)
- self.assertEqual(page2.save_as, 'blog/2/index.html')
- self.assertEqual(page2.url, '//blog.my.site/2/')
+ self.assertEqual(page2.save_as, "blog/2/index.html")
+ self.assertEqual(page2.url, "//blog.my.site/2/")
page3 = paginator.page(3)
- self.assertEqual(page3.save_as, 'blog/index.html')
- self.assertEqual(page3.url, '//blog.my.site/')
+ self.assertEqual(page3.save_as, "blog/index.html")
+ self.assertEqual(page3.url, "//blog.my.site/")
diff --git a/pelican/tests/test_pelican.py b/pelican/tests/test_pelican.py
--- a/pelican/tests/test_pelican.py
+++ b/pelican/tests/test_pelican.py
@@ -20,9 +20,10 @@
)
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
-SAMPLES_PATH = os.path.abspath(os.path.join(
- CURRENT_DIR, os.pardir, os.pardir, 'samples'))
-OUTPUT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, 'output'))
+SAMPLES_PATH = os.path.abspath(
+ os.path.join(CURRENT_DIR, os.pardir, os.pardir, "samples")
+)
+OUTPUT_PATH = os.path.abspath(os.path.join(CURRENT_DIR, "output"))
INPUT_PATH = os.path.join(SAMPLES_PATH, "content")
SAMPLE_CONFIG = os.path.join(SAMPLES_PATH, "pelican.conf.py")
@@ -31,9 +32,9 @@
def recursiveDiff(dcmp):
diff = {
- 'diff_files': [os.path.join(dcmp.right, f) for f in dcmp.diff_files],
- 'left_only': [os.path.join(dcmp.right, f) for f in dcmp.left_only],
- 'right_only': [os.path.join(dcmp.right, f) for f in dcmp.right_only],
+ "diff_files": [os.path.join(dcmp.right, f) for f in dcmp.diff_files],
+ "left_only": [os.path.join(dcmp.right, f) for f in dcmp.left_only],
+ "right_only": [os.path.join(dcmp.right, f) for f in dcmp.right_only],
}
for sub_dcmp in dcmp.subdirs.values():
for k, v in recursiveDiff(sub_dcmp).items():
@@ -47,11 +48,11 @@ class TestPelican(LoggedTestCase):
def setUp(self):
super().setUp()
- self.temp_path = mkdtemp(prefix='pelicantests.')
- self.temp_cache = mkdtemp(prefix='pelican_cache.')
+ self.temp_path = mkdtemp(prefix="pelicantests.")
+ self.temp_cache = mkdtemp(prefix="pelican_cache.")
self.maxDiff = None
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
def tearDown(self):
read_settings() # cleanup PYGMENTS_RST_OPTIONS
@@ -70,8 +71,8 @@ def assertDirsEqual(self, left_path, right_path, msg=None):
if proc.returncode != 0:
msg = self._formatMessage(
msg,
- "%s and %s differ:\nstdout:\n%s\nstderr\n%s" %
- (left_path, right_path, out, err)
+ "%s and %s differ:\nstdout:\n%s\nstderr\n%s"
+ % (left_path, right_path, out, err),
)
raise self.failureException(msg)
@@ -85,136 +86,154 @@ def test_order_of_generators(self):
self.assertTrue(
generator_classes[-1] is StaticGenerator,
- "StaticGenerator must be the last generator, but it isn't!")
+ "StaticGenerator must be the last generator, but it isn't!",
+ )
self.assertIsInstance(
- generator_classes, Sequence,
- "_get_generator_classes() must return a Sequence to preserve order")
+ generator_classes,
+ Sequence,
+ "_get_generator_classes() must return a Sequence to preserve order",
+ )
- @skipIfNoExecutable(['git', '--version'])
+ @skipIfNoExecutable(["git", "--version"])
def test_basic_generation_works(self):
# when running pelican without settings, it should pick up the default
# ones and generate correct output without raising any exception
- settings = read_settings(path=None, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'LOCALE': locale.normalize('en_US'),
- })
+ settings = read_settings(
+ path=None,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "LOCALE": locale.normalize("en_US"),
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
- self.assertDirsEqual(
- self.temp_path, os.path.join(OUTPUT_PATH, 'basic')
- )
+ self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "basic"))
self.assertLogCountEqual(
count=1,
msg="Unable to find.*skipping url replacement",
- level=logging.WARNING)
+ level=logging.WARNING,
+ )
- @skipIfNoExecutable(['git', '--version'])
+ @skipIfNoExecutable(["git", "--version"])
def test_custom_generation_works(self):
# the same thing with a specified set of settings should work
- settings = read_settings(path=SAMPLE_CONFIG, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'LOCALE': locale.normalize('en_US.UTF-8'),
- })
+ settings = read_settings(
+ path=SAMPLE_CONFIG,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "LOCALE": locale.normalize("en_US.UTF-8"),
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
- self.assertDirsEqual(
- self.temp_path, os.path.join(OUTPUT_PATH, 'custom')
- )
+ self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "custom"))
- @skipIfNoExecutable(['git', '--version'])
- @unittest.skipUnless(locale_available('fr_FR.UTF-8') or
- locale_available('French'), 'French locale needed')
+ @skipIfNoExecutable(["git", "--version"])
+ @unittest.skipUnless(
+ locale_available("fr_FR.UTF-8") or locale_available("French"),
+ "French locale needed",
+ )
def test_custom_locale_generation_works(self):
- '''Test that generation with fr_FR.UTF-8 locale works'''
- if sys.platform == 'win32':
- our_locale = 'French'
+ """Test that generation with fr_FR.UTF-8 locale works"""
+ if sys.platform == "win32":
+ our_locale = "French"
else:
- our_locale = 'fr_FR.UTF-8'
+ our_locale = "fr_FR.UTF-8"
- settings = read_settings(path=SAMPLE_FR_CONFIG, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'LOCALE': our_locale,
- })
+ settings = read_settings(
+ path=SAMPLE_FR_CONFIG,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "LOCALE": our_locale,
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
- self.assertDirsEqual(
- self.temp_path, os.path.join(OUTPUT_PATH, 'custom_locale')
- )
+ self.assertDirsEqual(self.temp_path, os.path.join(OUTPUT_PATH, "custom_locale"))
def test_theme_static_paths_copy(self):
# the same thing with a specified set of settings should work
- settings = read_settings(path=SAMPLE_CONFIG, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'THEME_STATIC_PATHS': [os.path.join(SAMPLES_PATH, 'very'),
- os.path.join(SAMPLES_PATH, 'kinda'),
- os.path.join(SAMPLES_PATH,
- 'theme_standard')]
- })
+ settings = read_settings(
+ path=SAMPLE_CONFIG,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "THEME_STATIC_PATHS": [
+ os.path.join(SAMPLES_PATH, "very"),
+ os.path.join(SAMPLES_PATH, "kinda"),
+ os.path.join(SAMPLES_PATH, "theme_standard"),
+ ],
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
- theme_output = os.path.join(self.temp_path, 'theme')
- extra_path = os.path.join(theme_output, 'exciting', 'new', 'files')
+ theme_output = os.path.join(self.temp_path, "theme")
+ extra_path = os.path.join(theme_output, "exciting", "new", "files")
- for file in ['a_stylesheet', 'a_template']:
+ for file in ["a_stylesheet", "a_template"]:
self.assertTrue(os.path.exists(os.path.join(theme_output, file)))
- for file in ['wow!', 'boom!', 'bap!', 'zap!']:
+ for file in ["wow!", "boom!", "bap!", "zap!"]:
self.assertTrue(os.path.exists(os.path.join(extra_path, file)))
def test_theme_static_paths_copy_single_file(self):
# the same thing with a specified set of settings should work
- settings = read_settings(path=SAMPLE_CONFIG, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'THEME_STATIC_PATHS': [os.path.join(SAMPLES_PATH,
- 'theme_standard')]
- })
+ settings = read_settings(
+ path=SAMPLE_CONFIG,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "THEME_STATIC_PATHS": [os.path.join(SAMPLES_PATH, "theme_standard")],
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
- theme_output = os.path.join(self.temp_path, 'theme')
+ theme_output = os.path.join(self.temp_path, "theme")
- for file in ['a_stylesheet', 'a_template']:
+ for file in ["a_stylesheet", "a_template"]:
self.assertTrue(os.path.exists(os.path.join(theme_output, file)))
def test_write_only_selected(self):
"""Test that only the selected files are written"""
- settings = read_settings(path=None, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'WRITE_SELECTED': [
- os.path.join(self.temp_path, 'oh-yeah.html'),
- os.path.join(self.temp_path, 'categories.html'),
- ],
- 'LOCALE': locale.normalize('en_US'),
- })
+ settings = read_settings(
+ path=None,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "WRITE_SELECTED": [
+ os.path.join(self.temp_path, "oh-yeah.html"),
+ os.path.join(self.temp_path, "categories.html"),
+ ],
+ "LOCALE": locale.normalize("en_US"),
+ },
+ )
pelican = Pelican(settings=settings)
logger = logging.getLogger()
orig_level = logger.getEffectiveLevel()
logger.setLevel(logging.INFO)
mute(True)(pelican.run)()
logger.setLevel(orig_level)
- self.assertLogCountEqual(
- count=2,
- msg="Writing .*",
- level=logging.INFO)
+ self.assertLogCountEqual(count=2, msg="Writing .*", level=logging.INFO)
def test_cyclic_intersite_links_no_warnings(self):
- settings = read_settings(path=None, override={
- 'PATH': os.path.join(CURRENT_DIR, 'cyclic_intersite_links'),
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- })
+ settings = read_settings(
+ path=None,
+ override={
+ "PATH": os.path.join(CURRENT_DIR, "cyclic_intersite_links"),
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
# There are four different intersite links:
@@ -230,41 +249,48 @@ def test_cyclic_intersite_links_no_warnings(self):
self.assertLogCountEqual(
count=1,
msg="Unable to find '.*\\.rst', skipping url replacement.",
- level=logging.WARNING)
+ level=logging.WARNING,
+ )
def test_md_extensions_deprecation(self):
"""Test that a warning is issued if MD_EXTENSIONS is used"""
- settings = read_settings(path=None, override={
- 'PATH': INPUT_PATH,
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- 'MD_EXTENSIONS': {},
- })
+ settings = read_settings(
+ path=None,
+ override={
+ "PATH": INPUT_PATH,
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ "MD_EXTENSIONS": {},
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertLogCountEqual(
count=1,
msg="MD_EXTENSIONS is deprecated use MARKDOWN instead.",
- level=logging.WARNING)
+ level=logging.WARNING,
+ )
def test_parse_errors(self):
# Verify that just an error is printed and the application doesn't
# abort, exit or something.
- settings = read_settings(path=None, override={
- 'PATH': os.path.abspath(os.path.join(CURRENT_DIR, 'parse_error')),
- 'OUTPUT_PATH': self.temp_path,
- 'CACHE_PATH': self.temp_cache,
- })
+ settings = read_settings(
+ path=None,
+ override={
+ "PATH": os.path.abspath(os.path.join(CURRENT_DIR, "parse_error")),
+ "OUTPUT_PATH": self.temp_path,
+ "CACHE_PATH": self.temp_cache,
+ },
+ )
pelican = Pelican(settings=settings)
mute(True)(pelican.run)()
self.assertLogCountEqual(
- count=1,
- msg="Could not process .*parse_error.rst",
- level=logging.ERROR)
+ count=1, msg="Could not process .*parse_error.rst", level=logging.ERROR
+ )
def test_module_load(self):
"""Test loading via python -m pelican --help displays the help"""
- output = subprocess.check_output([
- sys.executable, '-m', 'pelican', '--help'
- ]).decode('ascii', 'replace')
- assert 'usage:' in output
+ output = subprocess.check_output(
+ [sys.executable, "-m", "pelican", "--help"]
+ ).decode("ascii", "replace")
+ assert "usage:" in output
diff --git a/pelican/tests/test_plugins.py b/pelican/tests/test_plugins.py
--- a/pelican/tests/test_plugins.py
+++ b/pelican/tests/test_plugins.py
@@ -2,27 +2,26 @@
from contextlib import contextmanager
import pelican.tests.dummy_plugins.normal_plugin.normal_plugin as normal_plugin
-from pelican.plugins._utils import (get_namespace_plugins, get_plugin_name,
- load_plugins)
+from pelican.plugins._utils import get_namespace_plugins, get_plugin_name, load_plugins
from pelican.tests.support import unittest
@contextmanager
def tmp_namespace_path(path):
- '''Context manager for temporarily appending namespace plugin packages
+ """Context manager for temporarily appending namespace plugin packages
path: path containing the `pelican` folder
This modifies the `pelican.__path__` and lets the `pelican.plugins`
namespace package resolve it from that.
- '''
+ """
# This avoids calls to internal `pelican.plugins.__path__._recalculate()`
# as it should not be necessary
import pelican
old_path = pelican.__path__[:]
try:
- pelican.__path__.append(os.path.join(path, 'pelican'))
+ pelican.__path__.append(os.path.join(path, "pelican"))
yield
finally:
pelican.__path__ = old_path
@@ -30,38 +29,38 @@ def tmp_namespace_path(path):
class PluginTest(unittest.TestCase):
_PLUGIN_FOLDER = os.path.join(
- os.path.abspath(os.path.dirname(__file__)),
- 'dummy_plugins')
- _NS_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, 'namespace_plugin')
- _NORMAL_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, 'normal_plugin')
+ os.path.abspath(os.path.dirname(__file__)), "dummy_plugins"
+ )
+ _NS_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, "namespace_plugin")
+ _NORMAL_PLUGIN_FOLDER = os.path.join(_PLUGIN_FOLDER, "normal_plugin")
def test_namespace_path_modification(self):
import pelican
import pelican.plugins
+
old_path = pelican.__path__[:]
# not existing path
- path = os.path.join(self._PLUGIN_FOLDER, 'foo')
+ path = os.path.join(self._PLUGIN_FOLDER, "foo")
with tmp_namespace_path(path):
- self.assertIn(
- os.path.join(path, 'pelican'),
- pelican.__path__)
+ self.assertIn(os.path.join(path, "pelican"), pelican.__path__)
# foo/pelican does not exist, so it won't propagate
self.assertNotIn(
- os.path.join(path, 'pelican', 'plugins'),
- pelican.plugins.__path__)
+ os.path.join(path, "pelican", "plugins"), pelican.plugins.__path__
+ )
# verify that we restored path back
self.assertEqual(pelican.__path__, old_path)
# existing path
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
self.assertIn(
- os.path.join(self._NS_PLUGIN_FOLDER, 'pelican'),
- pelican.__path__)
+ os.path.join(self._NS_PLUGIN_FOLDER, "pelican"), pelican.__path__
+ )
# /namespace_plugin/pelican exists, so it should be in
self.assertIn(
- os.path.join(self._NS_PLUGIN_FOLDER, 'pelican', 'plugins'),
- pelican.plugins.__path__)
+ os.path.join(self._NS_PLUGIN_FOLDER, "pelican", "plugins"),
+ pelican.plugins.__path__,
+ )
self.assertEqual(pelican.__path__, old_path)
def test_get_namespace_plugins(self):
@@ -71,11 +70,11 @@ def test_get_namespace_plugins(self):
# with plugin
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
ns_plugins = get_namespace_plugins()
- self.assertEqual(len(ns_plugins), len(existing_ns_plugins)+1)
- self.assertIn('pelican.plugins.ns_plugin', ns_plugins)
+ self.assertEqual(len(ns_plugins), len(existing_ns_plugins) + 1)
+ self.assertIn("pelican.plugins.ns_plugin", ns_plugins)
self.assertEqual(
- ns_plugins['pelican.plugins.ns_plugin'].NAME,
- 'namespace plugin')
+ ns_plugins["pelican.plugins.ns_plugin"].NAME, "namespace plugin"
+ )
# should be back to existing namespace plugins outside `with`
ns_plugins = get_namespace_plugins()
@@ -91,15 +90,14 @@ def get_plugin_names(plugins):
with tmp_namespace_path(self._NS_PLUGIN_FOLDER):
# with no `PLUGINS` setting, load namespace plugins
plugins = load_plugins({})
- self.assertEqual(len(plugins), len(existing_ns_plugins)+1, plugins)
+ self.assertEqual(len(plugins), len(existing_ns_plugins) + 1, plugins)
self.assertEqual(
- {'pelican.plugins.ns_plugin'} | get_plugin_names(existing_ns_plugins),
- get_plugin_names(plugins))
+ {"pelican.plugins.ns_plugin"} | get_plugin_names(existing_ns_plugins),
+ get_plugin_names(plugins),
+ )
# disable namespace plugins with `PLUGINS = []`
- SETTINGS = {
- 'PLUGINS': []
- }
+ SETTINGS = {"PLUGINS": []}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 0, plugins)
@@ -107,34 +105,35 @@ def get_plugin_names(plugins):
# normal plugin
SETTINGS = {
- 'PLUGINS': ['normal_plugin'],
- 'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
+ "PLUGINS": ["normal_plugin"],
+ "PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
- self.assertEqual(
- {'normal_plugin'},
- get_plugin_names(plugins))
+ self.assertEqual({"normal_plugin"}, get_plugin_names(plugins))
# normal submodule/subpackage plugins
SETTINGS = {
- 'PLUGINS': [
- 'normal_submodule_plugin.subplugin',
- 'normal_submodule_plugin.subpackage.subpackage',
+ "PLUGINS": [
+ "normal_submodule_plugin.subplugin",
+ "normal_submodule_plugin.subpackage.subpackage",
],
- 'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
+ "PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 2, plugins)
self.assertEqual(
- {'normal_submodule_plugin.subplugin',
- 'normal_submodule_plugin.subpackage.subpackage'},
- get_plugin_names(plugins))
+ {
+ "normal_submodule_plugin.subplugin",
+ "normal_submodule_plugin.subpackage.subpackage",
+ },
+ get_plugin_names(plugins),
+ )
# ensure normal plugins are loaded only once
SETTINGS = {
- 'PLUGINS': ['normal_plugin'],
- 'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER],
+ "PLUGINS": ["normal_plugin"],
+ "PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
for plugin in load_plugins(SETTINGS):
@@ -143,40 +142,33 @@ def get_plugin_names(plugins):
self.assertIn(plugin, plugins)
# namespace plugin short
- SETTINGS = {
- 'PLUGINS': ['ns_plugin']
- }
+ SETTINGS = {"PLUGINS": ["ns_plugin"]}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
- self.assertEqual(
- {'pelican.plugins.ns_plugin'},
- get_plugin_names(plugins))
+ self.assertEqual({"pelican.plugins.ns_plugin"}, get_plugin_names(plugins))
# namespace plugin long
- SETTINGS = {
- 'PLUGINS': ['pelican.plugins.ns_plugin']
- }
+ SETTINGS = {"PLUGINS": ["pelican.plugins.ns_plugin"]}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 1, plugins)
- self.assertEqual(
- {'pelican.plugins.ns_plugin'},
- get_plugin_names(plugins))
+ self.assertEqual({"pelican.plugins.ns_plugin"}, get_plugin_names(plugins))
# normal and namespace plugin
SETTINGS = {
- 'PLUGINS': ['normal_plugin', 'ns_plugin'],
- 'PLUGIN_PATHS': [self._NORMAL_PLUGIN_FOLDER]
+ "PLUGINS": ["normal_plugin", "ns_plugin"],
+ "PLUGIN_PATHS": [self._NORMAL_PLUGIN_FOLDER],
}
plugins = load_plugins(SETTINGS)
self.assertEqual(len(plugins), 2, plugins)
self.assertEqual(
- {'normal_plugin', 'pelican.plugins.ns_plugin'},
- get_plugin_names(plugins))
+ {"normal_plugin", "pelican.plugins.ns_plugin"},
+ get_plugin_names(plugins),
+ )
def test_get_plugin_name(self):
self.assertEqual(
get_plugin_name(normal_plugin),
- 'pelican.tests.dummy_plugins.normal_plugin.normal_plugin',
+ "pelican.tests.dummy_plugins.normal_plugin.normal_plugin",
)
class NoopPlugin:
@@ -185,7 +177,9 @@ def register(self):
self.assertEqual(
get_plugin_name(NoopPlugin),
- 'PluginTest.test_get_plugin_name.<locals>.NoopPlugin')
+ "PluginTest.test_get_plugin_name.<locals>.NoopPlugin",
+ )
self.assertEqual(
get_plugin_name(NoopPlugin()),
- 'PluginTest.test_get_plugin_name.<locals>.NoopPlugin')
+ "PluginTest.test_get_plugin_name.<locals>.NoopPlugin",
+ )
diff --git a/pelican/tests/test_readers.py b/pelican/tests/test_readers.py
--- a/pelican/tests/test_readers.py
+++ b/pelican/tests/test_readers.py
@@ -7,7 +7,7 @@
CUR_DIR = os.path.dirname(__file__)
-CONTENT_PATH = os.path.join(CUR_DIR, 'content')
+CONTENT_PATH = os.path.join(CUR_DIR, "content")
def _path(*args):
@@ -15,7 +15,6 @@ def _path(*args):
class ReaderTest(unittest.TestCase):
-
def read_file(self, path, **kwargs):
# Isolate from future API changes to readers.read_file
@@ -29,26 +28,24 @@ def assertDictHasSubset(self, dictionary, subset):
self.assertEqual(
value,
real_value,
- 'Expected %s to have value %s, but was %s' %
- (key, value, real_value))
+ "Expected %s to have value %s, but was %s"
+ % (key, value, real_value),
+ )
else:
self.fail(
- 'Expected %s to have value %s, but was not in Dict' %
- (key, value))
+ "Expected %s to have value %s, but was not in Dict" % (key, value)
+ )
class TestAssertDictHasSubset(ReaderTest):
def setUp(self):
- self.dictionary = {
- 'key-a': 'val-a',
- 'key-b': 'val-b'
- }
+ self.dictionary = {"key-a": "val-a", "key-b": "val-b"}
def tearDown(self):
self.dictionary = None
def test_subset(self):
- self.assertDictHasSubset(self.dictionary, {'key-a': 'val-a'})
+ self.assertDictHasSubset(self.dictionary, {"key-a": "val-a"})
def test_equal(self):
self.assertDictHasSubset(self.dictionary, self.dictionary)
@@ -56,269 +53,260 @@ def test_equal(self):
def test_fail_not_set(self):
self.assertRaisesRegex(
AssertionError,
- r'Expected.*key-c.*to have value.*val-c.*but was not in Dict',
+ r"Expected.*key-c.*to have value.*val-c.*but was not in Dict",
self.assertDictHasSubset,
self.dictionary,
- {'key-c': 'val-c'})
+ {"key-c": "val-c"},
+ )
def test_fail_wrong_val(self):
self.assertRaisesRegex(
AssertionError,
- r'Expected .*key-a.* to have value .*val-b.* but was .*val-a.*',
+ r"Expected .*key-a.* to have value .*val-b.* but was .*val-a.*",
self.assertDictHasSubset,
self.dictionary,
- {'key-a': 'val-b'})
+ {"key-a": "val-b"},
+ )
class DefaultReaderTest(ReaderTest):
-
def test_readfile_unknown_extension(self):
with self.assertRaises(TypeError):
- self.read_file(path='article_with_metadata.unknownextension')
+ self.read_file(path="article_with_metadata.unknownextension")
def test_readfile_path_metadata_implicit_dates(self):
- test_file = 'article_with_metadata_implicit_dates.html'
- page = self.read_file(path=test_file, DEFAULT_DATE='fs')
+ test_file = "article_with_metadata_implicit_dates.html"
+ page = self.read_file(path=test_file, DEFAULT_DATE="fs")
expected = {
- 'date': SafeDatetime.fromtimestamp(
- os.stat(_path(test_file)).st_mtime),
- 'modified': SafeDatetime.fromtimestamp(
- os.stat(_path(test_file)).st_mtime)
+ "date": SafeDatetime.fromtimestamp(os.stat(_path(test_file)).st_mtime),
+ "modified": SafeDatetime.fromtimestamp(os.stat(_path(test_file)).st_mtime),
}
self.assertDictHasSubset(page.metadata, expected)
def test_readfile_path_metadata_explicit_dates(self):
- test_file = 'article_with_metadata_explicit_dates.html'
- page = self.read_file(path=test_file, DEFAULT_DATE='fs')
+ test_file = "article_with_metadata_explicit_dates.html"
+ page = self.read_file(path=test_file, DEFAULT_DATE="fs")
expected = {
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'modified': SafeDatetime(2010, 12, 31, 23, 59)
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "modified": SafeDatetime(2010, 12, 31, 23, 59),
}
self.assertDictHasSubset(page.metadata, expected)
def test_readfile_path_metadata_implicit_date_explicit_modified(self):
- test_file = 'article_with_metadata_implicit_date_explicit_modified.html'
- page = self.read_file(path=test_file, DEFAULT_DATE='fs')
+ test_file = "article_with_metadata_implicit_date_explicit_modified.html"
+ page = self.read_file(path=test_file, DEFAULT_DATE="fs")
expected = {
- 'date': SafeDatetime.fromtimestamp(
- os.stat(_path(test_file)).st_mtime),
- 'modified': SafeDatetime(2010, 12, 2, 10, 14),
+ "date": SafeDatetime.fromtimestamp(os.stat(_path(test_file)).st_mtime),
+ "modified": SafeDatetime(2010, 12, 2, 10, 14),
}
self.assertDictHasSubset(page.metadata, expected)
def test_readfile_path_metadata_explicit_date_implicit_modified(self):
- test_file = 'article_with_metadata_explicit_date_implicit_modified.html'
- page = self.read_file(path=test_file, DEFAULT_DATE='fs')
+ test_file = "article_with_metadata_explicit_date_implicit_modified.html"
+ page = self.read_file(path=test_file, DEFAULT_DATE="fs")
expected = {
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'modified': SafeDatetime.fromtimestamp(
- os.stat(_path(test_file)).st_mtime)
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "modified": SafeDatetime.fromtimestamp(os.stat(_path(test_file)).st_mtime),
}
self.assertDictHasSubset(page.metadata, expected)
def test_find_empty_alt(self):
- with patch('pelican.readers.logger') as log_mock:
- content = ['<img alt="" src="test-image.png" width="300px" />',
- '<img src="test-image.png" width="300px" alt="" />']
+ with patch("pelican.readers.logger") as log_mock:
+ content = [
+ '<img alt="" src="test-image.png" width="300px" />',
+ '<img src="test-image.png" width="300px" alt="" />',
+ ]
for tag in content:
- readers.find_empty_alt(tag, '/test/path')
+ readers.find_empty_alt(tag, "/test/path")
log_mock.warning.assert_called_with(
- 'Empty alt attribute for image %s in %s',
- 'test-image.png',
- '/test/path',
- extra={'limit_msg':
- 'Other images have empty alt attributes'}
+ "Empty alt attribute for image %s in %s",
+ "test-image.png",
+ "/test/path",
+ extra={"limit_msg": "Other images have empty alt attributes"},
)
class RstReaderTest(ReaderTest):
-
def test_article_with_metadata(self):
- page = self.read_file(path='article_with_metadata.rst')
+ page = self.read_file(path="article_with_metadata.rst")
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'This is a super article !',
- 'summary': '<p class="first last">Multi-line metadata should be'
- ' supported\nas well as <strong>inline'
- ' markup</strong> and stuff to "typogrify'
- '"...</p>\n',
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'modified': SafeDatetime(2010, 12, 2, 10, 20),
- 'tags': ['foo', 'bar', 'foobar'],
- 'custom_field': 'http://notmyidea.org',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "This is a super article !",
+ "summary": '<p class="first last">Multi-line metadata should be'
+ " supported\nas well as <strong>inline"
+ " markup</strong> and stuff to "typogrify"
+ ""...</p>\n",
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "modified": SafeDatetime(2010, 12, 2, 10, 20),
+ "tags": ["foo", "bar", "foobar"],
+ "custom_field": "http://notmyidea.org",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_capitalized_metadata(self):
- page = self.read_file(path='article_with_capitalized_metadata.rst')
+ page = self.read_file(path="article_with_capitalized_metadata.rst")
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'This is a super article !',
- 'summary': '<p class="first last">Multi-line metadata should be'
- ' supported\nas well as <strong>inline'
- ' markup</strong> and stuff to "typogrify'
- '"...</p>\n',
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'modified': SafeDatetime(2010, 12, 2, 10, 20),
- 'tags': ['foo', 'bar', 'foobar'],
- 'custom_field': 'http://notmyidea.org',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "This is a super article !",
+ "summary": '<p class="first last">Multi-line metadata should be'
+ " supported\nas well as <strong>inline"
+ " markup</strong> and stuff to "typogrify"
+ ""...</p>\n",
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "modified": SafeDatetime(2010, 12, 2, 10, 20),
+ "tags": ["foo", "bar", "foobar"],
+ "custom_field": "http://notmyidea.org",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_filename_metadata(self):
page = self.read_file(
- path='2012-11-29_rst_w_filename_meta#foo-bar.rst',
- FILENAME_METADATA=None)
+ path="2012-11-29_rst_w_filename_meta#foo-bar.rst", FILENAME_METADATA=None
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'Rst with filename metadata',
- 'reader': 'rst',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "Rst with filename metadata",
+ "reader": "rst",
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='2012-11-29_rst_w_filename_meta#foo-bar.rst',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2}).*')
+ path="2012-11-29_rst_w_filename_meta#foo-bar.rst",
+ FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2}).*",
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'Rst with filename metadata',
- 'date': SafeDatetime(2012, 11, 29),
- 'reader': 'rst',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "Rst with filename metadata",
+ "date": SafeDatetime(2012, 11, 29),
+ "reader": "rst",
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='2012-11-29_rst_w_filename_meta#foo-bar.rst',
+ path="2012-11-29_rst_w_filename_meta#foo-bar.rst",
FILENAME_METADATA=(
- r'(?P<date>\d{4}-\d{2}-\d{2})'
- r'_(?P<Slug>.*)'
- r'#(?P<MyMeta>.*)-(?P<author>.*)'))
+ r"(?P<date>\d{4}-\d{2}-\d{2})"
+ r"_(?P<Slug>.*)"
+ r"#(?P<MyMeta>.*)-(?P<author>.*)"
+ ),
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'Rst with filename metadata',
- 'date': SafeDatetime(2012, 11, 29),
- 'slug': 'rst_w_filename_meta',
- 'mymeta': 'foo',
- 'reader': 'rst',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "Rst with filename metadata",
+ "date": SafeDatetime(2012, 11, 29),
+ "slug": "rst_w_filename_meta",
+ "mymeta": "foo",
+ "reader": "rst",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_optional_filename_metadata(self):
page = self.read_file(
- path='2012-11-29_rst_w_filename_meta#foo-bar.rst',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2})?')
+ path="2012-11-29_rst_w_filename_meta#foo-bar.rst",
+ FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2})?",
+ )
expected = {
- 'date': SafeDatetime(2012, 11, 29),
- 'reader': 'rst',
+ "date": SafeDatetime(2012, 11, 29),
+ "reader": "rst",
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='article.rst',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2})?')
+ path="article.rst", FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2})?"
+ )
expected = {
- 'reader': 'rst',
+ "reader": "rst",
}
self.assertDictHasSubset(page.metadata, expected)
- self.assertNotIn('date', page.metadata, 'Date should not be set.')
+ self.assertNotIn("date", page.metadata, "Date should not be set.")
def test_article_metadata_key_lowercase(self):
# Keys of metadata should be lowercase.
reader = readers.RstReader(settings=get_settings())
- content, metadata = reader.read(
- _path('article_with_uppercase_metadata.rst'))
+ content, metadata = reader.read(_path("article_with_uppercase_metadata.rst"))
- self.assertIn('category', metadata, 'Key should be lowercase.')
- self.assertEqual('Yeah', metadata.get('category'),
- 'Value keeps case.')
+ self.assertIn("category", metadata, "Key should be lowercase.")
+ self.assertEqual("Yeah", metadata.get("category"), "Value keeps case.")
def test_article_extra_path_metadata(self):
- input_with_metadata = '2012-11-29_rst_w_filename_meta#foo-bar.rst'
+ input_with_metadata = "2012-11-29_rst_w_filename_meta#foo-bar.rst"
page_metadata = self.read_file(
path=input_with_metadata,
FILENAME_METADATA=(
- r'(?P<date>\d{4}-\d{2}-\d{2})'
- r'_(?P<Slug>.*)'
- r'#(?P<MyMeta>.*)-(?P<author>.*)'
+ r"(?P<date>\d{4}-\d{2}-\d{2})"
+ r"_(?P<Slug>.*)"
+ r"#(?P<MyMeta>.*)-(?P<author>.*)"
),
EXTRA_PATH_METADATA={
- input_with_metadata: {
- 'key-1a': 'value-1a',
- 'key-1b': 'value-1b'
- }
- }
+ input_with_metadata: {"key-1a": "value-1a", "key-1b": "value-1b"}
+ },
)
expected_metadata = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'Rst with filename metadata',
- 'date': SafeDatetime(2012, 11, 29),
- 'slug': 'rst_w_filename_meta',
- 'mymeta': 'foo',
- 'reader': 'rst',
- 'key-1a': 'value-1a',
- 'key-1b': 'value-1b'
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "Rst with filename metadata",
+ "date": SafeDatetime(2012, 11, 29),
+ "slug": "rst_w_filename_meta",
+ "mymeta": "foo",
+ "reader": "rst",
+ "key-1a": "value-1a",
+ "key-1b": "value-1b",
}
self.assertDictHasSubset(page_metadata.metadata, expected_metadata)
- input_file_path_without_metadata = 'article.rst'
+ input_file_path_without_metadata = "article.rst"
page_without_metadata = self.read_file(
path=input_file_path_without_metadata,
EXTRA_PATH_METADATA={
- input_file_path_without_metadata: {
- 'author': 'Charlès Overwrite'
- }
- }
+ input_file_path_without_metadata: {"author": "Charlès Overwrite"}
+ },
)
expected_without_metadata = {
- 'category': 'misc',
- 'author': 'Charlès Overwrite',
- 'title': 'Article title',
- 'reader': 'rst',
+ "category": "misc",
+ "author": "Charlès Overwrite",
+ "title": "Article title",
+ "reader": "rst",
}
self.assertDictHasSubset(
- page_without_metadata.metadata,
- expected_without_metadata)
+ page_without_metadata.metadata, expected_without_metadata
+ )
def test_article_extra_path_metadata_dont_overwrite(self):
# EXTRA_PATH_METADATA['author'] should get ignored
# since we don't overwrite already set values
- input_file_path = '2012-11-29_rst_w_filename_meta#foo-bar.rst'
+ input_file_path = "2012-11-29_rst_w_filename_meta#foo-bar.rst"
page = self.read_file(
path=input_file_path,
FILENAME_METADATA=(
- r'(?P<date>\d{4}-\d{2}-\d{2})'
- r'_(?P<Slug>.*)'
- r'#(?P<MyMeta>.*)-(?P<orginalauthor>.*)'
+ r"(?P<date>\d{4}-\d{2}-\d{2})"
+ r"_(?P<Slug>.*)"
+ r"#(?P<MyMeta>.*)-(?P<orginalauthor>.*)"
),
EXTRA_PATH_METADATA={
- input_file_path: {
- 'author': 'Charlès Overwrite',
- 'key-1b': 'value-1b'
- }
- }
+ input_file_path: {"author": "Charlès Overwrite", "key-1b": "value-1b"}
+ },
)
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'Rst with filename metadata',
- 'date': SafeDatetime(2012, 11, 29),
- 'slug': 'rst_w_filename_meta',
- 'mymeta': 'foo',
- 'reader': 'rst',
- 'key-1b': 'value-1b'
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "Rst with filename metadata",
+ "date": SafeDatetime(2012, 11, 29),
+ "slug": "rst_w_filename_meta",
+ "mymeta": "foo",
+ "reader": "rst",
+ "key-1b": "value-1b",
}
self.assertDictHasSubset(page.metadata, expected)
@@ -328,15 +316,19 @@ def test_article_extra_path_metadata_recurse(self):
path = "TestCategory/article_without_category.rst"
epm = {
- parent: {'epmr_inherit': parent,
- 'epmr_override': parent, },
- notparent: {'epmr_bogus': notparent},
- path: {'epmr_override': path, },
- }
+ parent: {
+ "epmr_inherit": parent,
+ "epmr_override": parent,
+ },
+ notparent: {"epmr_bogus": notparent},
+ path: {
+ "epmr_override": path,
+ },
+ }
expected_metadata = {
- 'epmr_inherit': parent,
- 'epmr_override': path,
- }
+ "epmr_inherit": parent,
+ "epmr_override": path,
+ }
page = self.read_file(path=path, EXTRA_PATH_METADATA=epm)
self.assertDictHasSubset(page.metadata, expected_metadata)
@@ -357,152 +349,157 @@ def test_article_extra_path_metadata_recurse(self):
def test_typogrify(self):
# if nothing is specified in the settings, the content should be
# unmodified
- page = self.read_file(path='article.rst')
- expected = ('<p>THIS is some content. With some stuff to '
- '"typogrify"...</p>\n<p>Now with added '
- 'support for <abbr title="three letter acronym">'
- 'TLA</abbr>.</p>\n')
+ page = self.read_file(path="article.rst")
+ expected = (
+ "<p>THIS is some content. With some stuff to "
+ ""typogrify"...</p>\n<p>Now with added "
+ 'support for <abbr title="three letter acronym">'
+ "TLA</abbr>.</p>\n"
+ )
self.assertEqual(page.content, expected)
try:
# otherwise, typogrify should be applied
- page = self.read_file(path='article.rst', TYPOGRIFY=True)
+ page = self.read_file(path="article.rst", TYPOGRIFY=True)
expected = (
'<p><span class="caps">THIS</span> is some content. '
- 'With some stuff to “typogrify”…</p>\n'
+ "With some stuff to “typogrify”…</p>\n"
'<p>Now with added support for <abbr title="three letter '
- 'acronym"><span class="caps">TLA</span></abbr>.</p>\n')
+ 'acronym"><span class="caps">TLA</span></abbr>.</p>\n'
+ )
self.assertEqual(page.content, expected)
except ImportError:
- return unittest.skip('need the typogrify distribution')
+ return unittest.skip("need the typogrify distribution")
def test_typogrify_summary(self):
# if nothing is specified in the settings, the summary should be
# unmodified
- page = self.read_file(path='article_with_metadata.rst')
- expected = ('<p class="first last">Multi-line metadata should be'
- ' supported\nas well as <strong>inline'
- ' markup</strong> and stuff to "typogrify'
- '"...</p>\n')
+ page = self.read_file(path="article_with_metadata.rst")
+ expected = (
+ '<p class="first last">Multi-line metadata should be'
+ " supported\nas well as <strong>inline"
+ " markup</strong> and stuff to "typogrify"
+ ""...</p>\n"
+ )
- self.assertEqual(page.metadata['summary'], expected)
+ self.assertEqual(page.metadata["summary"], expected)
try:
# otherwise, typogrify should be applied
- page = self.read_file(path='article_with_metadata.rst',
- TYPOGRIFY=True)
- expected = ('<p class="first last">Multi-line metadata should be'
- ' supported\nas well as <strong>inline'
- ' markup</strong> and stuff to “typogrify'
- '”…</p>\n')
-
- self.assertEqual(page.metadata['summary'], expected)
+ page = self.read_file(path="article_with_metadata.rst", TYPOGRIFY=True)
+ expected = (
+ '<p class="first last">Multi-line metadata should be'
+ " supported\nas well as <strong>inline"
+ " markup</strong> and stuff to “typogrify"
+ "”…</p>\n"
+ )
+
+ self.assertEqual(page.metadata["summary"], expected)
except ImportError:
- return unittest.skip('need the typogrify distribution')
+ return unittest.skip("need the typogrify distribution")
def test_typogrify_ignore_tags(self):
try:
# typogrify should be able to ignore user specified tags,
# but tries to be clever with widont extension
- page = self.read_file(path='article.rst', TYPOGRIFY=True,
- TYPOGRIFY_IGNORE_TAGS=['p'])
- expected = ('<p>THIS is some content. With some stuff to '
- '"typogrify"...</p>\n<p>Now with added '
- 'support for <abbr title="three letter acronym">'
- 'TLA</abbr>.</p>\n')
+ page = self.read_file(
+ path="article.rst", TYPOGRIFY=True, TYPOGRIFY_IGNORE_TAGS=["p"]
+ )
+ expected = (
+ "<p>THIS is some content. With some stuff to "
+ ""typogrify"...</p>\n<p>Now with added "
+ 'support for <abbr title="three letter acronym">'
+ "TLA</abbr>.</p>\n"
+ )
self.assertEqual(page.content, expected)
# typogrify should ignore code blocks by default because
# code blocks are composed inside the pre tag
- page = self.read_file(path='article_with_code_block.rst',
- TYPOGRIFY=True)
-
- expected = ('<p>An article with some code</p>\n'
- '<div class="highlight"><pre><span></span>'
- '<span class="n">x</span>'
- ' <span class="o">&</span>'
- ' <span class="n">y</span>\n</pre></div>\n'
- '<p>A block quote:</p>\n<blockquote>\nx '
- '<span class="amp">&</span> y</blockquote>\n'
- '<p>Normal:\nx'
- ' <span class="amp">&</span>'
- ' y'
- '</p>\n')
+ page = self.read_file(path="article_with_code_block.rst", TYPOGRIFY=True)
+
+ expected = (
+ "<p>An article with some code</p>\n"
+ '<div class="highlight"><pre><span></span>'
+ '<span class="n">x</span>'
+ ' <span class="o">&</span>'
+ ' <span class="n">y</span>\n</pre></div>\n'
+ "<p>A block quote:</p>\n<blockquote>\nx "
+ '<span class="amp">&</span> y</blockquote>\n'
+ "<p>Normal:\nx"
+ ' <span class="amp">&</span>'
+ " y"
+ "</p>\n"
+ )
self.assertEqual(page.content, expected)
# instruct typogrify to also ignore blockquotes
- page = self.read_file(path='article_with_code_block.rst',
- TYPOGRIFY=True,
- TYPOGRIFY_IGNORE_TAGS=['blockquote'])
-
- expected = ('<p>An article with some code</p>\n'
- '<div class="highlight"><pre><span>'
- '</span><span class="n">x</span>'
- ' <span class="o">&</span>'
- ' <span class="n">y</span>\n</pre></div>\n'
- '<p>A block quote:</p>\n<blockquote>\nx '
- '& y</blockquote>\n'
- '<p>Normal:\nx'
- ' <span class="amp">&</span>'
- ' y'
- '</p>\n')
+ page = self.read_file(
+ path="article_with_code_block.rst",
+ TYPOGRIFY=True,
+ TYPOGRIFY_IGNORE_TAGS=["blockquote"],
+ )
+
+ expected = (
+ "<p>An article with some code</p>\n"
+ '<div class="highlight"><pre><span>'
+ '</span><span class="n">x</span>'
+ ' <span class="o">&</span>'
+ ' <span class="n">y</span>\n</pre></div>\n'
+ "<p>A block quote:</p>\n<blockquote>\nx "
+ "& y</blockquote>\n"
+ "<p>Normal:\nx"
+ ' <span class="amp">&</span>'
+ " y"
+ "</p>\n"
+ )
self.assertEqual(page.content, expected)
except ImportError:
- return unittest.skip('need the typogrify distribution')
+ return unittest.skip("need the typogrify distribution")
except TypeError:
- return unittest.skip('need typogrify version 2.0.4 or later')
+ return unittest.skip("need typogrify version 2.0.4 or later")
def test_article_with_multiple_authors(self):
- page = self.read_file(path='article_with_multiple_authors.rst')
- expected = {
- 'authors': ['First Author', 'Second Author']
- }
+ page = self.read_file(path="article_with_multiple_authors.rst")
+ expected = {"authors": ["First Author", "Second Author"]}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_multiple_authors_semicolon(self):
- page = self.read_file(
- path='article_with_multiple_authors_semicolon.rst')
- expected = {
- 'authors': ['Author, First', 'Author, Second']
- }
+ page = self.read_file(path="article_with_multiple_authors_semicolon.rst")
+ expected = {"authors": ["Author, First", "Author, Second"]}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_multiple_authors_list(self):
- page = self.read_file(path='article_with_multiple_authors_list.rst')
- expected = {
- 'authors': ['Author, First', 'Author, Second']
- }
+ page = self.read_file(path="article_with_multiple_authors_list.rst")
+ expected = {"authors": ["Author, First", "Author, Second"]}
self.assertDictHasSubset(page.metadata, expected)
def test_default_date_formats(self):
- tuple_date = self.read_file(path='article.rst',
- DEFAULT_DATE=(2012, 5, 1))
- string_date = self.read_file(path='article.rst',
- DEFAULT_DATE='2012-05-01')
+ tuple_date = self.read_file(path="article.rst", DEFAULT_DATE=(2012, 5, 1))
+ string_date = self.read_file(path="article.rst", DEFAULT_DATE="2012-05-01")
- self.assertEqual(tuple_date.metadata['date'],
- string_date.metadata['date'])
+ self.assertEqual(tuple_date.metadata["date"], string_date.metadata["date"])
def test_parse_error(self):
# Verify that it raises an Exception, not nothing and not SystemExit or
# some such
with self.assertRaisesRegex(Exception, "underline too short"):
- self.read_file(path='../parse_error/parse_error.rst')
+ self.read_file(path="../parse_error/parse_error.rst")
def test_typogrify_dashes_config(self):
# Test default config
page = self.read_file(
- path='article_with_typogrify_dashes.rst',
+ path="article_with_typogrify_dashes.rst",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='default')
+ TYPOGRIFY_DASHES="default",
+ )
expected = "<p>One: -; Two: —; Three: —-</p>\n"
expected_title = "One -, two —, three —- dashes!"
@@ -511,9 +508,10 @@ def test_typogrify_dashes_config(self):
# Test 'oldschool' variant
page = self.read_file(
- path='article_with_typogrify_dashes.rst',
+ path="article_with_typogrify_dashes.rst",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='oldschool')
+ TYPOGRIFY_DASHES="oldschool",
+ )
expected = "<p>One: -; Two: –; Three: —</p>\n"
expected_title = "One -, two –, three — dashes!"
@@ -522,9 +520,10 @@ def test_typogrify_dashes_config(self):
# Test 'oldschool_inverted' variant
page = self.read_file(
- path='article_with_typogrify_dashes.rst',
+ path="article_with_typogrify_dashes.rst",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='oldschool_inverted')
+ TYPOGRIFY_DASHES="oldschool_inverted",
+ )
expected = "<p>One: -; Two: —; Three: –</p>\n"
expected_title = "One -, two —, three – dashes!"
@@ -534,75 +533,73 @@ def test_typogrify_dashes_config(self):
@unittest.skipUnless(readers.Markdown, "markdown isn't installed")
class MdReaderTest(ReaderTest):
-
def test_article_with_metadata(self):
reader = readers.MarkdownReader(settings=get_settings())
- content, metadata = reader.read(
- _path('article_with_md_extension.md'))
+ content, metadata = reader.read(_path("article_with_md_extension.md"))
expected = {
- 'category': 'test',
- 'title': 'Test md File',
- 'summary': '<p>I have a lot to test</p>',
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'modified': SafeDatetime(2010, 12, 2, 10, 20),
- 'tags': ['foo', 'bar', 'foobar'],
+ "category": "test",
+ "title": "Test md File",
+ "summary": "<p>I have a lot to test</p>",
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "modified": SafeDatetime(2010, 12, 2, 10, 20),
+ "tags": ["foo", "bar", "foobar"],
}
self.assertDictHasSubset(metadata, expected)
content, metadata = reader.read(
- _path('article_with_markdown_and_nonascii_summary.md'))
+ _path("article_with_markdown_and_nonascii_summary.md")
+ )
expected = {
- 'title': 'マックOS X 10.8でパイソンとVirtualenvをインストールと設定',
- 'summary': '<p>パイソンとVirtualenvをまっくでインストールする方法について明確に説明します。</p>',
- 'category': '指導書',
- 'date': SafeDatetime(2012, 12, 20),
- 'modified': SafeDatetime(2012, 12, 22),
- 'tags': ['パイソン', 'マック'],
- 'slug': 'python-virtualenv-on-mac-osx-mountain-lion-10.8',
+ "title": "マックOS X 10.8でパイソンとVirtualenvをインストールと設定",
+ "summary": "<p>パイソンとVirtualenvをまっくでインストールする方法について明確に説明します。</p>",
+ "category": "指導書",
+ "date": SafeDatetime(2012, 12, 20),
+ "modified": SafeDatetime(2012, 12, 22),
+ "tags": ["パイソン", "マック"],
+ "slug": "python-virtualenv-on-mac-osx-mountain-lion-10.8",
}
self.assertDictHasSubset(metadata, expected)
def test_article_with_footnote(self):
settings = get_settings()
- ec = settings['MARKDOWN']['extension_configs']
- ec['markdown.extensions.footnotes'] = {'SEPARATOR': '-'}
+ ec = settings["MARKDOWN"]["extension_configs"]
+ ec["markdown.extensions.footnotes"] = {"SEPARATOR": "-"}
reader = readers.MarkdownReader(settings)
- content, metadata = reader.read(
- _path('article_with_markdown_and_footnote.md'))
+ content, metadata = reader.read(_path("article_with_markdown_and_footnote.md"))
expected_content = (
- '<p>This is some content'
+ "<p>This is some content"
'<sup id="fnref-1"><a class="footnote-ref" href="#fn-1"'
- '>1</a></sup>'
- ' with some footnotes'
+ ">1</a></sup>"
+ " with some footnotes"
'<sup id="fnref-footnote"><a class="footnote-ref" '
'href="#fn-footnote">2</a></sup></p>\n'
-
'<div class="footnote">\n'
'<hr>\n<ol>\n<li id="fn-1">\n'
- '<p>Numbered footnote '
+ "<p>Numbered footnote "
'<a class="footnote-backref" href="#fnref-1" '
'title="Jump back to footnote 1 in the text">↩</a></p>\n'
'</li>\n<li id="fn-footnote">\n'
- '<p>Named footnote '
+ "<p>Named footnote "
'<a class="footnote-backref" href="#fnref-footnote"'
' title="Jump back to footnote 2 in the text">↩</a></p>\n'
- '</li>\n</ol>\n</div>')
+ "</li>\n</ol>\n</div>"
+ )
expected_metadata = {
- 'title': 'Article with markdown containing footnotes',
- 'summary': (
- '<p>Summary with <strong>inline</strong> markup '
- '<em>should</em> be supported.</p>'),
- 'date': SafeDatetime(2012, 10, 31),
- 'modified': SafeDatetime(2012, 11, 1),
- 'multiline': [
- 'Line Metadata should be handle properly.',
- 'See syntax of Meta-Data extension of '
- 'Python Markdown package:',
- 'If a line is indented by 4 or more spaces,',
- 'that line is assumed to be an additional line of the value',
- 'for the previous keyword.',
- 'A keyword may have as many lines as desired.',
- ]
+ "title": "Article with markdown containing footnotes",
+ "summary": (
+ "<p>Summary with <strong>inline</strong> markup "
+ "<em>should</em> be supported.</p>"
+ ),
+ "date": SafeDatetime(2012, 10, 31),
+ "modified": SafeDatetime(2012, 11, 1),
+ "multiline": [
+ "Line Metadata should be handle properly.",
+ "See syntax of Meta-Data extension of " "Python Markdown package:",
+ "If a line is indented by 4 or more spaces,",
+ "that line is assumed to be an additional line of the value",
+ "for the previous keyword.",
+ "A keyword may have as many lines as desired.",
+ ],
}
self.assertEqual(content, expected_content)
self.assertDictHasSubset(metadata, expected_metadata)
@@ -611,163 +608,173 @@ def test_article_with_file_extensions(self):
reader = readers.MarkdownReader(settings=get_settings())
# test to ensure the md file extension is being processed by the
# correct reader
- content, metadata = reader.read(
- _path('article_with_md_extension.md'))
+ content, metadata = reader.read(_path("article_with_md_extension.md"))
expected = (
"<h1>Test Markdown File Header</h1>\n"
"<h2>Used for pelican test</h2>\n"
- "<p>The quick brown fox jumped over the lazy dog's back.</p>")
+ "<p>The quick brown fox jumped over the lazy dog's back.</p>"
+ )
self.assertEqual(content, expected)
# test to ensure the mkd file extension is being processed by the
# correct reader
- content, metadata = reader.read(
- _path('article_with_mkd_extension.mkd'))
- expected = ("<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
- " test</h2>\n<p>This is another markdown test file. Uses"
- " the mkd extension.</p>")
+ content, metadata = reader.read(_path("article_with_mkd_extension.mkd"))
+ expected = (
+ "<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
+ " test</h2>\n<p>This is another markdown test file. Uses"
+ " the mkd extension.</p>"
+ )
self.assertEqual(content, expected)
# test to ensure the markdown file extension is being processed by the
# correct reader
content, metadata = reader.read(
- _path('article_with_markdown_extension.markdown'))
- expected = ("<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
- " test</h2>\n<p>This is another markdown test file. Uses"
- " the markdown extension.</p>")
+ _path("article_with_markdown_extension.markdown")
+ )
+ expected = (
+ "<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
+ " test</h2>\n<p>This is another markdown test file. Uses"
+ " the markdown extension.</p>"
+ )
self.assertEqual(content, expected)
# test to ensure the mdown file extension is being processed by the
# correct reader
- content, metadata = reader.read(
- _path('article_with_mdown_extension.mdown'))
- expected = ("<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
- " test</h2>\n<p>This is another markdown test file. Uses"
- " the mdown extension.</p>")
+ content, metadata = reader.read(_path("article_with_mdown_extension.mdown"))
+ expected = (
+ "<h1>Test Markdown File Header</h1>\n<h2>Used for pelican"
+ " test</h2>\n<p>This is another markdown test file. Uses"
+ " the mdown extension.</p>"
+ )
self.assertEqual(content, expected)
def test_article_with_markdown_markup_extension(self):
# test to ensure the markdown markup extension is being processed as
# expected
page = self.read_file(
- path='article_with_markdown_markup_extensions.md',
+ path="article_with_markdown_markup_extensions.md",
MARKDOWN={
- 'extension_configs': {
- 'markdown.extensions.toc': {},
- 'markdown.extensions.codehilite': {},
- 'markdown.extensions.extra': {}
+ "extension_configs": {
+ "markdown.extensions.toc": {},
+ "markdown.extensions.codehilite": {},
+ "markdown.extensions.extra": {},
}
- }
- )
- expected = ('<div class="toc">\n'
- '<ul>\n'
- '<li><a href="#level1">Level1</a><ul>\n'
- '<li><a href="#level2">Level2</a></li>\n'
- '</ul>\n'
- '</li>\n'
- '</ul>\n'
- '</div>\n'
- '<h2 id="level1">Level1</h2>\n'
- '<h3 id="level2">Level2</h3>')
+ },
+ )
+ expected = (
+ '<div class="toc">\n'
+ "<ul>\n"
+ '<li><a href="#level1">Level1</a><ul>\n'
+ '<li><a href="#level2">Level2</a></li>\n'
+ "</ul>\n"
+ "</li>\n"
+ "</ul>\n"
+ "</div>\n"
+ '<h2 id="level1">Level1</h2>\n'
+ '<h3 id="level2">Level2</h3>'
+ )
self.assertEqual(page.content, expected)
def test_article_with_filename_metadata(self):
page = self.read_file(
- path='2012-11-30_md_w_filename_meta#foo-bar.md',
- FILENAME_METADATA=None)
+ path="2012-11-30_md_w_filename_meta#foo-bar.md", FILENAME_METADATA=None
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='2012-11-30_md_w_filename_meta#foo-bar.md',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2}).*')
+ path="2012-11-30_md_w_filename_meta#foo-bar.md",
+ FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2}).*",
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'date': SafeDatetime(2012, 11, 30),
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "date": SafeDatetime(2012, 11, 30),
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='2012-11-30_md_w_filename_meta#foo-bar.md',
+ path="2012-11-30_md_w_filename_meta#foo-bar.md",
FILENAME_METADATA=(
- r'(?P<date>\d{4}-\d{2}-\d{2})'
- r'_(?P<Slug>.*)'
- r'#(?P<MyMeta>.*)-(?P<author>.*)'))
+ r"(?P<date>\d{4}-\d{2}-\d{2})"
+ r"_(?P<Slug>.*)"
+ r"#(?P<MyMeta>.*)-(?P<author>.*)"
+ ),
+ )
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'date': SafeDatetime(2012, 11, 30),
- 'slug': 'md_w_filename_meta',
- 'mymeta': 'foo',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "date": SafeDatetime(2012, 11, 30),
+ "slug": "md_w_filename_meta",
+ "mymeta": "foo",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_optional_filename_metadata(self):
page = self.read_file(
- path='2012-11-30_md_w_filename_meta#foo-bar.md',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2})?')
+ path="2012-11-30_md_w_filename_meta#foo-bar.md",
+ FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2})?",
+ )
expected = {
- 'date': SafeDatetime(2012, 11, 30),
- 'reader': 'markdown',
+ "date": SafeDatetime(2012, 11, 30),
+ "reader": "markdown",
}
self.assertDictHasSubset(page.metadata, expected)
page = self.read_file(
- path='empty.md',
- FILENAME_METADATA=r'(?P<date>\d{4}-\d{2}-\d{2})?')
+ path="empty.md", FILENAME_METADATA=r"(?P<date>\d{4}-\d{2}-\d{2})?"
+ )
expected = {
- 'reader': 'markdown',
+ "reader": "markdown",
}
self.assertDictHasSubset(page.metadata, expected)
- self.assertNotIn('date', page.metadata, 'Date should not be set.')
+ self.assertNotIn("date", page.metadata, "Date should not be set.")
def test_duplicate_tags_or_authors_are_removed(self):
reader = readers.MarkdownReader(settings=get_settings())
- content, metadata = reader.read(
- _path('article_with_duplicate_tags_authors.md'))
+ content, metadata = reader.read(_path("article_with_duplicate_tags_authors.md"))
expected = {
- 'tags': ['foo', 'bar', 'foobar'],
- 'authors': ['Author, First', 'Author, Second'],
+ "tags": ["foo", "bar", "foobar"],
+ "authors": ["Author, First", "Author, Second"],
}
self.assertDictHasSubset(metadata, expected)
def test_metadata_not_parsed_for_metadata(self):
settings = get_settings()
- settings['FORMATTED_FIELDS'] = ['summary']
+ settings["FORMATTED_FIELDS"] = ["summary"]
reader = readers.MarkdownReader(settings=settings)
content, metadata = reader.read(
- _path('article_with_markdown_and_nested_metadata.md'))
+ _path("article_with_markdown_and_nested_metadata.md")
+ )
expected = {
- 'title': 'Article with markdown and nested summary metadata',
- 'summary': '<p>Test: This metadata value looks like metadata</p>',
+ "title": "Article with markdown and nested summary metadata",
+ "summary": "<p>Test: This metadata value looks like metadata</p>",
}
self.assertDictHasSubset(metadata, expected)
def test_empty_file(self):
reader = readers.MarkdownReader(settings=get_settings())
- content, metadata = reader.read(
- _path('empty.md'))
+ content, metadata = reader.read(_path("empty.md"))
self.assertEqual(metadata, {})
- self.assertEqual(content, '')
+ self.assertEqual(content, "")
def test_empty_file_with_bom(self):
reader = readers.MarkdownReader(settings=get_settings())
- content, metadata = reader.read(
- _path('empty_with_bom.md'))
+ content, metadata = reader.read(_path("empty_with_bom.md"))
self.assertEqual(metadata, {})
- self.assertEqual(content, '')
+ self.assertEqual(content, "")
def test_typogrify_dashes_config(self):
# Test default config
page = self.read_file(
- path='article_with_typogrify_dashes.md',
+ path="article_with_typogrify_dashes.md",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='default')
+ TYPOGRIFY_DASHES="default",
+ )
expected = "<p>One: -; Two: —; Three: —-</p>"
expected_title = "One -, two —, three —- dashes!"
@@ -776,9 +783,10 @@ def test_typogrify_dashes_config(self):
# Test 'oldschool' variant
page = self.read_file(
- path='article_with_typogrify_dashes.md',
+ path="article_with_typogrify_dashes.md",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='oldschool')
+ TYPOGRIFY_DASHES="oldschool",
+ )
expected = "<p>One: -; Two: –; Three: —</p>"
expected_title = "One -, two –, three — dashes!"
@@ -787,9 +795,10 @@ def test_typogrify_dashes_config(self):
# Test 'oldschool_inverted' variant
page = self.read_file(
- path='article_with_typogrify_dashes.md',
+ path="article_with_typogrify_dashes.md",
TYPOGRIFY=True,
- TYPOGRIFY_DASHES='oldschool_inverted')
+ TYPOGRIFY_DASHES="oldschool_inverted",
+ )
expected = "<p>One: -; Two: —; Three: –</p>"
expected_title = "One -, two —, three – dashes!"
@@ -797,124 +806,130 @@ def test_typogrify_dashes_config(self):
self.assertEqual(page.title, expected_title)
def test_metadata_has_no_discarded_data(self):
- md_filename = 'article_with_markdown_and_empty_tags.md'
+ md_filename = "article_with_markdown_and_empty_tags.md"
- r = readers.Readers(cache_name='cache', settings=get_settings(
- CACHE_CONTENT=True))
+ r = readers.Readers(
+ cache_name="cache", settings=get_settings(CACHE_CONTENT=True)
+ )
page = r.read_file(base_path=CONTENT_PATH, path=md_filename)
- __, cached_metadata = r.get_cached_data(
- _path(md_filename), (None, None))
+ __, cached_metadata = r.get_cached_data(_path(md_filename), (None, None))
- expected = {
- 'title': 'Article with markdown and empty tags'
- }
+ expected = {"title": "Article with markdown and empty tags"}
self.assertEqual(cached_metadata, expected)
- self.assertNotIn('tags', page.metadata)
+ self.assertNotIn("tags", page.metadata)
self.assertDictHasSubset(page.metadata, expected)
class HTMLReaderTest(ReaderTest):
def test_article_with_comments(self):
- page = self.read_file(path='article_with_comments.html')
+ page = self.read_file(path="article_with_comments.html")
- self.assertEqual('''
+ self.assertEqual(
+ """
Body content
<!-- This comment is included (including extra whitespace) -->
- ''', page.content)
+ """,
+ page.content,
+ )
def test_article_with_keywords(self):
- page = self.read_file(path='article_with_keywords.html')
+ page = self.read_file(path="article_with_keywords.html")
expected = {
- 'tags': ['foo', 'bar', 'foobar'],
+ "tags": ["foo", "bar", "foobar"],
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_metadata(self):
- page = self.read_file(path='article_with_metadata.html')
+ page = self.read_file(path="article_with_metadata.html")
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'This is a super article !',
- 'summary': 'Summary and stuff',
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'tags': ['foo', 'bar', 'foobar'],
- 'custom_field': 'http://notmyidea.org',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "This is a super article !",
+ "summary": "Summary and stuff",
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "tags": ["foo", "bar", "foobar"],
+ "custom_field": "http://notmyidea.org",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_multiple_similar_metadata_tags(self):
- page = self.read_file(path='article_with_multiple_metadata_tags.html')
+ page = self.read_file(path="article_with_multiple_metadata_tags.html")
expected = {
- 'custom_field': ['https://getpelican.com', 'https://www.eff.org'],
+ "custom_field": ["https://getpelican.com", "https://www.eff.org"],
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_multiple_authors(self):
- page = self.read_file(path='article_with_multiple_authors.html')
- expected = {
- 'authors': ['First Author', 'Second Author']
- }
+ page = self.read_file(path="article_with_multiple_authors.html")
+ expected = {"authors": ["First Author", "Second Author"]}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_metadata_and_contents_attrib(self):
- page = self.read_file(path='article_with_metadata_and_contents.html')
+ page = self.read_file(path="article_with_metadata_and_contents.html")
expected = {
- 'category': 'yeah',
- 'author': 'Alexis Métaireau',
- 'title': 'This is a super article !',
- 'summary': 'Summary and stuff',
- 'date': SafeDatetime(2010, 12, 2, 10, 14),
- 'tags': ['foo', 'bar', 'foobar'],
- 'custom_field': 'http://notmyidea.org',
+ "category": "yeah",
+ "author": "Alexis Métaireau",
+ "title": "This is a super article !",
+ "summary": "Summary and stuff",
+ "date": SafeDatetime(2010, 12, 2, 10, 14),
+ "tags": ["foo", "bar", "foobar"],
+ "custom_field": "http://notmyidea.org",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_null_attributes(self):
- page = self.read_file(path='article_with_null_attributes.html')
+ page = self.read_file(path="article_with_null_attributes.html")
- self.assertEqual('''
+ self.assertEqual(
+ """
Ensure that empty attributes are copied properly.
<input name="test" disabled style="" />
- ''', page.content)
+ """,
+ page.content,
+ )
def test_article_with_attributes_containing_double_quotes(self):
- page = self.read_file(path='article_with_attributes_containing_' +
- 'double_quotes.html')
- self.assertEqual('''
+ page = self.read_file(
+ path="article_with_attributes_containing_" + "double_quotes.html"
+ )
+ self.assertEqual(
+ """
Ensure that if an attribute value contains a double quote, it is
surrounded with single quotes, otherwise with double quotes.
<span data-test="'single quoted string'">Span content</span>
<span data-test='"double quoted string"'>Span content</span>
<span data-test="string without quotes">Span content</span>
- ''', page.content)
+ """,
+ page.content,
+ )
def test_article_metadata_key_lowercase(self):
# Keys of metadata should be lowercase.
- page = self.read_file(path='article_with_uppercase_metadata.html')
+ page = self.read_file(path="article_with_uppercase_metadata.html")
# Key should be lowercase
- self.assertIn('category', page.metadata, 'Key should be lowercase.')
+ self.assertIn("category", page.metadata, "Key should be lowercase.")
# Value should keep cases
- self.assertEqual('Yeah', page.metadata.get('category'))
+ self.assertEqual("Yeah", page.metadata.get("category"))
def test_article_with_nonconformant_meta_tags(self):
- page = self.read_file(path='article_with_nonconformant_meta_tags.html')
+ page = self.read_file(path="article_with_nonconformant_meta_tags.html")
expected = {
- 'summary': 'Summary and stuff',
- 'title': 'Article with Nonconformant HTML meta tags',
+ "summary": "Summary and stuff",
+ "title": "Article with Nonconformant HTML meta tags",
}
self.assertDictHasSubset(page.metadata, expected)
def test_article_with_inline_svg(self):
- page = self.read_file(path='article_with_inline_svg.html')
+ page = self.read_file(path="article_with_inline_svg.html")
expected = {
- 'title': 'Article with an inline SVG',
+ "title": "Article with an inline SVG",
}
self.assertDictHasSubset(page.metadata, expected)
diff --git a/pelican/tests/test_rstdirectives.py b/pelican/tests/test_rstdirectives.py
--- a/pelican/tests/test_rstdirectives.py
+++ b/pelican/tests/test_rstdirectives.py
@@ -6,11 +6,11 @@
class Test_abbr_role(unittest.TestCase):
def call_it(self, text):
from pelican.rstdirectives import abbr_role
+
rawtext = text
lineno = 42
- inliner = Mock(name='inliner')
- nodes, system_messages = abbr_role(
- 'abbr', rawtext, text, lineno, inliner)
+ inliner = Mock(name="inliner")
+ nodes, system_messages = abbr_role("abbr", rawtext, text, lineno, inliner)
self.assertEqual(system_messages, [])
self.assertEqual(len(nodes), 1)
return nodes[0]
@@ -18,14 +18,14 @@ def call_it(self, text):
def test(self):
node = self.call_it("Abbr (Abbreviation)")
self.assertEqual(node.astext(), "Abbr")
- self.assertEqual(node['explanation'], "Abbreviation")
+ self.assertEqual(node["explanation"], "Abbreviation")
def test_newlines_in_explanation(self):
node = self.call_it("CUL (See you\nlater)")
self.assertEqual(node.astext(), "CUL")
- self.assertEqual(node['explanation'], "See you\nlater")
+ self.assertEqual(node["explanation"], "See you\nlater")
def test_newlines_in_abbr(self):
node = self.call_it("US of\nA \n (USA)")
self.assertEqual(node.astext(), "US of\nA")
- self.assertEqual(node['explanation'], "USA")
+ self.assertEqual(node["explanation"], "USA")
diff --git a/pelican/tests/test_server.py b/pelican/tests/test_server.py
--- a/pelican/tests/test_server.py
+++ b/pelican/tests/test_server.py
@@ -17,10 +17,9 @@ class MockServer:
class TestServer(unittest.TestCase):
-
def setUp(self):
self.server = MockServer()
- self.temp_output = mkdtemp(prefix='pelicantests.')
+ self.temp_output = mkdtemp(prefix="pelicantests.")
self.old_cwd = os.getcwd()
os.chdir(self.temp_output)
@@ -29,32 +28,33 @@ def tearDown(self):
rmtree(self.temp_output)
def test_get_path_that_exists(self):
- handler = ComplexHTTPRequestHandler(MockRequest(), ('0.0.0.0', 8888),
- self.server)
+ handler = ComplexHTTPRequestHandler(
+ MockRequest(), ("0.0.0.0", 8888), self.server
+ )
handler.base_path = self.temp_output
- open(os.path.join(self.temp_output, 'foo.html'), 'a').close()
- os.mkdir(os.path.join(self.temp_output, 'foo'))
- open(os.path.join(self.temp_output, 'foo', 'index.html'), 'a').close()
+ open(os.path.join(self.temp_output, "foo.html"), "a").close()
+ os.mkdir(os.path.join(self.temp_output, "foo"))
+ open(os.path.join(self.temp_output, "foo", "index.html"), "a").close()
- os.mkdir(os.path.join(self.temp_output, 'bar'))
- open(os.path.join(self.temp_output, 'bar', 'index.html'), 'a').close()
+ os.mkdir(os.path.join(self.temp_output, "bar"))
+ open(os.path.join(self.temp_output, "bar", "index.html"), "a").close()
- os.mkdir(os.path.join(self.temp_output, 'baz'))
+ os.mkdir(os.path.join(self.temp_output, "baz"))
- for suffix in ['', '/']:
+ for suffix in ["", "/"]:
# foo.html has precedence over foo/index.html
- path = handler.get_path_that_exists('foo' + suffix)
- self.assertEqual(path, 'foo.html')
+ path = handler.get_path_that_exists("foo" + suffix)
+ self.assertEqual(path, "foo.html")
# folder with index.html should return folder/index.html
- path = handler.get_path_that_exists('bar' + suffix)
- self.assertEqual(path, 'bar/index.html')
+ path = handler.get_path_that_exists("bar" + suffix)
+ self.assertEqual(path, "bar/index.html")
# folder without index.html should return same as input
- path = handler.get_path_that_exists('baz' + suffix)
- self.assertEqual(path, 'baz' + suffix)
+ path = handler.get_path_that_exists("baz" + suffix)
+ self.assertEqual(path, "baz" + suffix)
# not existing path should return None
- path = handler.get_path_that_exists('quux' + suffix)
+ path = handler.get_path_that_exists("quux" + suffix)
self.assertIsNone(path)
diff --git a/pelican/tests/test_settings.py b/pelican/tests/test_settings.py
--- a/pelican/tests/test_settings.py
+++ b/pelican/tests/test_settings.py
@@ -4,10 +4,14 @@
from os.path import abspath, dirname, join
-from pelican.settings import (DEFAULT_CONFIG, DEFAULT_THEME,
- _printf_s_to_format_field,
- configure_settings,
- handle_deprecated_settings, read_settings)
+from pelican.settings import (
+ DEFAULT_CONFIG,
+ DEFAULT_THEME,
+ _printf_s_to_format_field,
+ configure_settings,
+ handle_deprecated_settings,
+ read_settings,
+)
from pelican.tests.support import unittest
@@ -16,40 +20,39 @@ class TestSettingsConfiguration(unittest.TestCase):
append new values to the settings (if any), and apply basic settings
optimizations.
"""
+
def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
self.PATH = abspath(dirname(__file__))
- default_conf = join(self.PATH, 'default_conf.py')
+ default_conf = join(self.PATH, "default_conf.py")
self.settings = read_settings(default_conf)
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_overwrite_existing_settings(self):
- self.assertEqual(self.settings.get('SITENAME'), "Alexis' log")
- self.assertEqual(
- self.settings.get('SITEURL'),
- 'http://blog.notmyidea.org')
+ self.assertEqual(self.settings.get("SITENAME"), "Alexis' log")
+ self.assertEqual(self.settings.get("SITEURL"), "http://blog.notmyidea.org")
def test_keep_default_settings(self):
# Keep default settings if not defined.
self.assertEqual(
- self.settings.get('DEFAULT_CATEGORY'),
- DEFAULT_CONFIG['DEFAULT_CATEGORY'])
+ self.settings.get("DEFAULT_CATEGORY"), DEFAULT_CONFIG["DEFAULT_CATEGORY"]
+ )
def test_dont_copy_small_keys(self):
# Do not copy keys not in caps.
- self.assertNotIn('foobar', self.settings)
+ self.assertNotIn("foobar", self.settings)
def test_read_empty_settings(self):
# Ensure an empty settings file results in default settings.
settings = read_settings(None)
expected = copy.deepcopy(DEFAULT_CONFIG)
# Added by configure settings
- expected['FEED_DOMAIN'] = ''
- expected['ARTICLE_EXCLUDES'] = ['pages']
- expected['PAGE_EXCLUDES'] = ['']
+ expected["FEED_DOMAIN"] = ""
+ expected["ARTICLE_EXCLUDES"] = ["pages"]
+ expected["PAGE_EXCLUDES"] = [""]
self.maxDiff = None
self.assertDictEqual(settings, expected)
@@ -57,250 +60,265 @@ def test_settings_return_independent(self):
# Make sure that the results from one settings call doesn't
# effect past or future instances.
self.PATH = abspath(dirname(__file__))
- default_conf = join(self.PATH, 'default_conf.py')
+ default_conf = join(self.PATH, "default_conf.py")
settings = read_settings(default_conf)
- settings['SITEURL'] = 'new-value'
+ settings["SITEURL"] = "new-value"
new_settings = read_settings(default_conf)
- self.assertNotEqual(new_settings['SITEURL'], settings['SITEURL'])
+ self.assertNotEqual(new_settings["SITEURL"], settings["SITEURL"])
def test_defaults_not_overwritten(self):
# This assumes 'SITENAME': 'A Pelican Blog'
settings = read_settings(None)
- settings['SITENAME'] = 'Not a Pelican Blog'
- self.assertNotEqual(settings['SITENAME'], DEFAULT_CONFIG['SITENAME'])
+ settings["SITENAME"] = "Not a Pelican Blog"
+ self.assertNotEqual(settings["SITENAME"], DEFAULT_CONFIG["SITENAME"])
def test_static_path_settings_safety(self):
# Disallow static paths from being strings
settings = {
- 'STATIC_PATHS': 'foo/bar',
- 'THEME_STATIC_PATHS': 'bar/baz',
+ "STATIC_PATHS": "foo/bar",
+ "THEME_STATIC_PATHS": "bar/baz",
# These 4 settings are required to run configure_settings
- 'PATH': '.',
- 'THEME': DEFAULT_THEME,
- 'SITEURL': 'http://blog.notmyidea.org/',
- 'LOCALE': '',
+ "PATH": ".",
+ "THEME": DEFAULT_THEME,
+ "SITEURL": "http://blog.notmyidea.org/",
+ "LOCALE": "",
}
configure_settings(settings)
+ self.assertEqual(settings["STATIC_PATHS"], DEFAULT_CONFIG["STATIC_PATHS"])
self.assertEqual(
- settings['STATIC_PATHS'],
- DEFAULT_CONFIG['STATIC_PATHS'])
- self.assertEqual(
- settings['THEME_STATIC_PATHS'],
- DEFAULT_CONFIG['THEME_STATIC_PATHS'])
+ settings["THEME_STATIC_PATHS"], DEFAULT_CONFIG["THEME_STATIC_PATHS"]
+ )
def test_configure_settings(self):
# Manipulations to settings should be applied correctly.
settings = {
- 'SITEURL': 'http://blog.notmyidea.org/',
- 'LOCALE': '',
- 'PATH': os.curdir,
- 'THEME': DEFAULT_THEME,
+ "SITEURL": "http://blog.notmyidea.org/",
+ "LOCALE": "",
+ "PATH": os.curdir,
+ "THEME": DEFAULT_THEME,
}
configure_settings(settings)
# SITEURL should not have a trailing slash
- self.assertEqual(settings['SITEURL'], 'http://blog.notmyidea.org')
+ self.assertEqual(settings["SITEURL"], "http://blog.notmyidea.org")
# FEED_DOMAIN, if undefined, should default to SITEURL
- self.assertEqual(settings['FEED_DOMAIN'], 'http://blog.notmyidea.org')
+ self.assertEqual(settings["FEED_DOMAIN"], "http://blog.notmyidea.org")
- settings['FEED_DOMAIN'] = 'http://feeds.example.com'
+ settings["FEED_DOMAIN"] = "http://feeds.example.com"
configure_settings(settings)
- self.assertEqual(settings['FEED_DOMAIN'], 'http://feeds.example.com')
+ self.assertEqual(settings["FEED_DOMAIN"], "http://feeds.example.com")
def test_theme_settings_exceptions(self):
settings = self.settings
# Check that theme lookup in "pelican/themes" functions as expected
- settings['THEME'] = os.path.split(settings['THEME'])[1]
+ settings["THEME"] = os.path.split(settings["THEME"])[1]
configure_settings(settings)
- self.assertEqual(settings['THEME'], DEFAULT_THEME)
+ self.assertEqual(settings["THEME"], DEFAULT_THEME)
# Check that non-existent theme raises exception
- settings['THEME'] = 'foo'
+ settings["THEME"] = "foo"
self.assertRaises(Exception, configure_settings, settings)
def test_deprecated_dir_setting(self):
settings = self.settings
- settings['ARTICLE_DIR'] = 'foo'
- settings['PAGE_DIR'] = 'bar'
+ settings["ARTICLE_DIR"] = "foo"
+ settings["PAGE_DIR"] = "bar"
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings['ARTICLE_PATHS'], ['foo'])
- self.assertEqual(settings['PAGE_PATHS'], ['bar'])
+ self.assertEqual(settings["ARTICLE_PATHS"], ["foo"])
+ self.assertEqual(settings["PAGE_PATHS"], ["bar"])
with self.assertRaises(KeyError):
- settings['ARTICLE_DIR']
- settings['PAGE_DIR']
+ settings["ARTICLE_DIR"]
+ settings["PAGE_DIR"]
def test_default_encoding(self):
# Test that the user locale is set if not specified in settings
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
# empty string = user system locale
- self.assertEqual(self.settings['LOCALE'], [''])
+ self.assertEqual(self.settings["LOCALE"], [""])
configure_settings(self.settings)
lc_time = locale.getlocale(locale.LC_TIME) # should be set to user locale
# explicitly set locale to user pref and test
- locale.setlocale(locale.LC_TIME, '')
+ locale.setlocale(locale.LC_TIME, "")
self.assertEqual(lc_time, locale.getlocale(locale.LC_TIME))
def test_invalid_settings_throw_exception(self):
# Test that the path name is valid
# test that 'PATH' is set
- settings = {
- }
+ settings = {}
self.assertRaises(Exception, configure_settings, settings)
# Test that 'PATH' is valid
- settings['PATH'] = ''
+ settings["PATH"] = ""
self.assertRaises(Exception, configure_settings, settings)
# Test nonexistent THEME
- settings['PATH'] = os.curdir
- settings['THEME'] = 'foo'
+ settings["PATH"] = os.curdir
+ settings["THEME"] = "foo"
self.assertRaises(Exception, configure_settings, settings)
def test__printf_s_to_format_field(self):
- for s in ('%s', '{%s}', '{%s'):
- option = 'foo/{}/bar.baz'.format(s)
- result = _printf_s_to_format_field(option, 'slug')
- expected = option % 'qux'
- found = result.format(slug='qux')
+ for s in ("%s", "{%s}", "{%s"):
+ option = "foo/{}/bar.baz".format(s)
+ result = _printf_s_to_format_field(option, "slug")
+ expected = option % "qux"
+ found = result.format(slug="qux")
self.assertEqual(expected, found)
def test_deprecated_extra_templates_paths(self):
settings = self.settings
- settings['EXTRA_TEMPLATES_PATHS'] = ['/foo/bar', '/ha']
+ settings["EXTRA_TEMPLATES_PATHS"] = ["/foo/bar", "/ha"]
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings['THEME_TEMPLATES_OVERRIDES'],
- ['/foo/bar', '/ha'])
- self.assertNotIn('EXTRA_TEMPLATES_PATHS', settings)
+ self.assertEqual(settings["THEME_TEMPLATES_OVERRIDES"], ["/foo/bar", "/ha"])
+ self.assertNotIn("EXTRA_TEMPLATES_PATHS", settings)
def test_deprecated_paginated_direct_templates(self):
settings = self.settings
- settings['PAGINATED_DIRECT_TEMPLATES'] = ['index', 'archives']
- settings['PAGINATED_TEMPLATES'] = {'index': 10, 'category': None}
+ settings["PAGINATED_DIRECT_TEMPLATES"] = ["index", "archives"]
+ settings["PAGINATED_TEMPLATES"] = {"index": 10, "category": None}
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings['PAGINATED_TEMPLATES'],
- {'index': 10, 'category': None, 'archives': None})
- self.assertNotIn('PAGINATED_DIRECT_TEMPLATES', settings)
+ self.assertEqual(
+ settings["PAGINATED_TEMPLATES"],
+ {"index": 10, "category": None, "archives": None},
+ )
+ self.assertNotIn("PAGINATED_DIRECT_TEMPLATES", settings)
def test_deprecated_paginated_direct_templates_from_file(self):
# This is equivalent to reading a settings file that has
# PAGINATED_DIRECT_TEMPLATES defined but no PAGINATED_TEMPLATES.
- settings = read_settings(None, override={
- 'PAGINATED_DIRECT_TEMPLATES': ['index', 'archives']
- })
- self.assertEqual(settings['PAGINATED_TEMPLATES'], {
- 'archives': None,
- 'author': None,
- 'index': None,
- 'category': None,
- 'tag': None})
- self.assertNotIn('PAGINATED_DIRECT_TEMPLATES', settings)
+ settings = read_settings(
+ None, override={"PAGINATED_DIRECT_TEMPLATES": ["index", "archives"]}
+ )
+ self.assertEqual(
+ settings["PAGINATED_TEMPLATES"],
+ {
+ "archives": None,
+ "author": None,
+ "index": None,
+ "category": None,
+ "tag": None,
+ },
+ )
+ self.assertNotIn("PAGINATED_DIRECT_TEMPLATES", settings)
def test_theme_and_extra_templates_exception(self):
settings = self.settings
- settings['EXTRA_TEMPLATES_PATHS'] = ['/ha']
- settings['THEME_TEMPLATES_OVERRIDES'] = ['/foo/bar']
+ settings["EXTRA_TEMPLATES_PATHS"] = ["/ha"]
+ settings["THEME_TEMPLATES_OVERRIDES"] = ["/foo/bar"]
self.assertRaises(Exception, handle_deprecated_settings, settings)
def test_slug_and_slug_regex_substitutions_exception(self):
settings = {}
- settings['SLUG_REGEX_SUBSTITUTIONS'] = [('C++', 'cpp')]
- settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
+ settings["SLUG_REGEX_SUBSTITUTIONS"] = [("C++", "cpp")]
+ settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]
self.assertRaises(Exception, handle_deprecated_settings, settings)
def test_deprecated_slug_substitutions(self):
- default_slug_regex_subs = self.settings['SLUG_REGEX_SUBSTITUTIONS']
+ default_slug_regex_subs = self.settings["SLUG_REGEX_SUBSTITUTIONS"]
# If no deprecated setting is set, don't set new ones
settings = {}
settings = handle_deprecated_settings(settings)
- self.assertNotIn('SLUG_REGEX_SUBSTITUTIONS', settings)
- self.assertNotIn('TAG_REGEX_SUBSTITUTIONS', settings)
- self.assertNotIn('CATEGORY_REGEX_SUBSTITUTIONS', settings)
- self.assertNotIn('AUTHOR_REGEX_SUBSTITUTIONS', settings)
+ self.assertNotIn("SLUG_REGEX_SUBSTITUTIONS", settings)
+ self.assertNotIn("TAG_REGEX_SUBSTITUTIONS", settings)
+ self.assertNotIn("CATEGORY_REGEX_SUBSTITUTIONS", settings)
+ self.assertNotIn("AUTHOR_REGEX_SUBSTITUTIONS", settings)
# If SLUG_SUBSTITUTIONS is set, set {SLUG, AUTHOR}_REGEX_SUBSTITUTIONS
# correctly, don't set {CATEGORY, TAG}_REGEX_SUBSTITUTIONS
settings = {}
- settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp')]
+ settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp")]
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings.get('SLUG_REGEX_SUBSTITUTIONS'),
- [(r'C\+\+', 'cpp')] + default_slug_regex_subs)
- self.assertNotIn('TAG_REGEX_SUBSTITUTIONS', settings)
- self.assertNotIn('CATEGORY_REGEX_SUBSTITUTIONS', settings)
- self.assertEqual(settings.get('AUTHOR_REGEX_SUBSTITUTIONS'),
- default_slug_regex_subs)
+ self.assertEqual(
+ settings.get("SLUG_REGEX_SUBSTITUTIONS"),
+ [(r"C\+\+", "cpp")] + default_slug_regex_subs,
+ )
+ self.assertNotIn("TAG_REGEX_SUBSTITUTIONS", settings)
+ self.assertNotIn("CATEGORY_REGEX_SUBSTITUTIONS", settings)
+ self.assertEqual(
+ settings.get("AUTHOR_REGEX_SUBSTITUTIONS"), default_slug_regex_subs
+ )
# If {CATEGORY, TAG, AUTHOR}_SUBSTITUTIONS are set, set
# {CATEGORY, TAG, AUTHOR}_REGEX_SUBSTITUTIONS correctly, don't set
# SLUG_REGEX_SUBSTITUTIONS
settings = {}
- settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
- settings['CATEGORY_SUBSTITUTIONS'] = [('C#', 'csharp')]
- settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov')]
+ settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]
+ settings["CATEGORY_SUBSTITUTIONS"] = [("C#", "csharp")]
+ settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov")]
settings = handle_deprecated_settings(settings)
- self.assertNotIn('SLUG_REGEX_SUBSTITUTIONS', settings)
- self.assertEqual(settings['TAG_REGEX_SUBSTITUTIONS'],
- [(r'C\#', 'csharp')] + default_slug_regex_subs)
- self.assertEqual(settings['CATEGORY_REGEX_SUBSTITUTIONS'],
- [(r'C\#', 'csharp')] + default_slug_regex_subs)
- self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
- [(r'Alexander\ Todorov', 'atodorov')] +
- default_slug_regex_subs)
+ self.assertNotIn("SLUG_REGEX_SUBSTITUTIONS", settings)
+ self.assertEqual(
+ settings["TAG_REGEX_SUBSTITUTIONS"],
+ [(r"C\#", "csharp")] + default_slug_regex_subs,
+ )
+ self.assertEqual(
+ settings["CATEGORY_REGEX_SUBSTITUTIONS"],
+ [(r"C\#", "csharp")] + default_slug_regex_subs,
+ )
+ self.assertEqual(
+ settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+ [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+ )
# If {SLUG, CATEGORY, TAG, AUTHOR}_SUBSTITUTIONS are set, set
# {SLUG, CATEGORY, TAG, AUTHOR}_REGEX_SUBSTITUTIONS correctly
settings = {}
- settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp')]
- settings['TAG_SUBSTITUTIONS'] = [('C#', 'csharp')]
- settings['CATEGORY_SUBSTITUTIONS'] = [('C#', 'csharp')]
- settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov')]
+ settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp")]
+ settings["TAG_SUBSTITUTIONS"] = [("C#", "csharp")]
+ settings["CATEGORY_SUBSTITUTIONS"] = [("C#", "csharp")]
+ settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov")]
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings['TAG_REGEX_SUBSTITUTIONS'],
- [(r'C\+\+', 'cpp')] + [(r'C\#', 'csharp')] +
- default_slug_regex_subs)
- self.assertEqual(settings['CATEGORY_REGEX_SUBSTITUTIONS'],
- [(r'C\+\+', 'cpp')] + [(r'C\#', 'csharp')] +
- default_slug_regex_subs)
- self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
- [(r'Alexander\ Todorov', 'atodorov')] +
- default_slug_regex_subs)
+ self.assertEqual(
+ settings["TAG_REGEX_SUBSTITUTIONS"],
+ [(r"C\+\+", "cpp")] + [(r"C\#", "csharp")] + default_slug_regex_subs,
+ )
+ self.assertEqual(
+ settings["CATEGORY_REGEX_SUBSTITUTIONS"],
+ [(r"C\+\+", "cpp")] + [(r"C\#", "csharp")] + default_slug_regex_subs,
+ )
+ self.assertEqual(
+ settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+ [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+ )
# Handle old 'skip' flags correctly
settings = {}
- settings['SLUG_SUBSTITUTIONS'] = [('C++', 'cpp', True)]
- settings['AUTHOR_SUBSTITUTIONS'] = [('Alexander Todorov', 'atodorov',
- False)]
+ settings["SLUG_SUBSTITUTIONS"] = [("C++", "cpp", True)]
+ settings["AUTHOR_SUBSTITUTIONS"] = [("Alexander Todorov", "atodorov", False)]
settings = handle_deprecated_settings(settings)
- self.assertEqual(settings.get('SLUG_REGEX_SUBSTITUTIONS'),
- [(r'C\+\+', 'cpp')] +
- [(r'(?u)\A\s*', ''), (r'(?u)\s*\Z', '')])
- self.assertEqual(settings['AUTHOR_REGEX_SUBSTITUTIONS'],
- [(r'Alexander\ Todorov', 'atodorov')] +
- default_slug_regex_subs)
+ self.assertEqual(
+ settings.get("SLUG_REGEX_SUBSTITUTIONS"),
+ [(r"C\+\+", "cpp")] + [(r"(?u)\A\s*", ""), (r"(?u)\s*\Z", "")],
+ )
+ self.assertEqual(
+ settings["AUTHOR_REGEX_SUBSTITUTIONS"],
+ [(r"Alexander\ Todorov", "atodorov")] + default_slug_regex_subs,
+ )
def test_deprecated_slug_substitutions_from_file(self):
# This is equivalent to reading a settings file that has
# SLUG_SUBSTITUTIONS defined but no SLUG_REGEX_SUBSTITUTIONS.
- settings = read_settings(None, override={
- 'SLUG_SUBSTITUTIONS': [('C++', 'cpp')]
- })
- self.assertEqual(settings['SLUG_REGEX_SUBSTITUTIONS'],
- [(r'C\+\+', 'cpp')] +
- self.settings['SLUG_REGEX_SUBSTITUTIONS'])
- self.assertNotIn('SLUG_SUBSTITUTIONS', settings)
+ settings = read_settings(
+ None, override={"SLUG_SUBSTITUTIONS": [("C++", "cpp")]}
+ )
+ self.assertEqual(
+ settings["SLUG_REGEX_SUBSTITUTIONS"],
+ [(r"C\+\+", "cpp")] + self.settings["SLUG_REGEX_SUBSTITUTIONS"],
+ )
+ self.assertNotIn("SLUG_SUBSTITUTIONS", settings)
diff --git a/pelican/tests/test_testsuite.py b/pelican/tests/test_testsuite.py
--- a/pelican/tests/test_testsuite.py
+++ b/pelican/tests/test_testsuite.py
@@ -4,7 +4,6 @@
class TestSuiteTest(unittest.TestCase):
-
def test_error_on_warning(self):
with self.assertRaises(UserWarning):
- warnings.warn('test warning')
+ warnings.warn("test warning")
diff --git a/pelican/tests/test_urlwrappers.py b/pelican/tests/test_urlwrappers.py
--- a/pelican/tests/test_urlwrappers.py
+++ b/pelican/tests/test_urlwrappers.py
@@ -5,22 +5,22 @@
class TestURLWrapper(unittest.TestCase):
def test_ordering(self):
# URLWrappers are sorted by name
- wrapper_a = URLWrapper(name='first', settings={})
- wrapper_b = URLWrapper(name='last', settings={})
+ wrapper_a = URLWrapper(name="first", settings={})
+ wrapper_b = URLWrapper(name="last", settings={})
self.assertFalse(wrapper_a > wrapper_b)
self.assertFalse(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertTrue(wrapper_a < wrapper_b)
- wrapper_b.name = 'first'
+ wrapper_b.name = "first"
self.assertFalse(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertTrue(wrapper_a == wrapper_b)
self.assertFalse(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
- wrapper_a.name = 'last'
+ wrapper_a.name = "last"
self.assertTrue(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
@@ -29,57 +29,68 @@ def test_ordering(self):
self.assertFalse(wrapper_a < wrapper_b)
def test_equality(self):
- tag = Tag('test', settings={})
- cat = Category('test', settings={})
- author = Author('test', settings={})
+ tag = Tag("test", settings={})
+ cat = Category("test", settings={})
+ author = Author("test", settings={})
# same name, but different class
self.assertNotEqual(tag, cat)
self.assertNotEqual(tag, author)
# should be equal vs text representing the same name
- self.assertEqual(tag, 'test')
+ self.assertEqual(tag, "test")
# should not be equal vs binary
- self.assertNotEqual(tag, b'test')
+ self.assertNotEqual(tag, b"test")
# Tags describing the same should be equal
- tag_equal = Tag('Test', settings={})
+ tag_equal = Tag("Test", settings={})
self.assertEqual(tag, tag_equal)
# Author describing the same should be equal
- author_equal = Author('Test', settings={})
+ author_equal = Author("Test", settings={})
self.assertEqual(author, author_equal)
- cat_ascii = Category('指導書', settings={})
- self.assertEqual(cat_ascii, 'zhi dao shu')
+ cat_ascii = Category("指導書", settings={})
+ self.assertEqual(cat_ascii, "zhi dao shu")
def test_slugify_with_substitutions_and_dots(self):
- tag = Tag('Tag Dot', settings={'TAG_REGEX_SUBSTITUTIONS': [
- ('Tag Dot', 'tag.dot'),
- ]})
- cat = Category('Category Dot',
- settings={'CATEGORY_REGEX_SUBSTITUTIONS': [
- ('Category Dot', 'cat.dot'),
- ]})
+ tag = Tag(
+ "Tag Dot",
+ settings={
+ "TAG_REGEX_SUBSTITUTIONS": [
+ ("Tag Dot", "tag.dot"),
+ ]
+ },
+ )
+ cat = Category(
+ "Category Dot",
+ settings={
+ "CATEGORY_REGEX_SUBSTITUTIONS": [
+ ("Category Dot", "cat.dot"),
+ ]
+ },
+ )
- self.assertEqual(tag.slug, 'tag.dot')
- self.assertEqual(cat.slug, 'cat.dot')
+ self.assertEqual(tag.slug, "tag.dot")
+ self.assertEqual(cat.slug, "cat.dot")
def test_author_slug_substitutions(self):
- settings = {'AUTHOR_REGEX_SUBSTITUTIONS': [
- ('Alexander Todorov', 'atodorov'),
- ('Krasimir Tsonev', 'krasimir'),
- (r'[^\w\s-]', ''),
- (r'(?u)\A\s*', ''),
- (r'(?u)\s*\Z', ''),
- (r'[-\s]+', '-'),
- ]}
+ settings = {
+ "AUTHOR_REGEX_SUBSTITUTIONS": [
+ ("Alexander Todorov", "atodorov"),
+ ("Krasimir Tsonev", "krasimir"),
+ (r"[^\w\s-]", ""),
+ (r"(?u)\A\s*", ""),
+ (r"(?u)\s*\Z", ""),
+ (r"[-\s]+", "-"),
+ ]
+ }
- author1 = Author('Mr. Senko', settings=settings)
- author2 = Author('Alexander Todorov', settings=settings)
- author3 = Author('Krasimir Tsonev', settings=settings)
+ author1 = Author("Mr. Senko", settings=settings)
+ author2 = Author("Alexander Todorov", settings=settings)
+ author3 = Author("Krasimir Tsonev", settings=settings)
- self.assertEqual(author1.slug, 'mr-senko')
- self.assertEqual(author2.slug, 'atodorov')
- self.assertEqual(author3.slug, 'krasimir')
+ self.assertEqual(author1.slug, "mr-senko")
+ self.assertEqual(author2.slug, "atodorov")
+ self.assertEqual(author3.slug, "krasimir")
diff --git a/pelican/tests/test_utils.py b/pelican/tests/test_utils.py
--- a/pelican/tests/test_utils.py
+++ b/pelican/tests/test_utils.py
@@ -14,25 +14,29 @@
from pelican import utils
from pelican.generators import TemplatePagesGenerator
from pelican.settings import read_settings
-from pelican.tests.support import (LoggedTestCase, get_article,
- locale_available, unittest)
+from pelican.tests.support import (
+ LoggedTestCase,
+ get_article,
+ locale_available,
+ unittest,
+)
from pelican.writers import Writer
class TestUtils(LoggedTestCase):
- _new_attribute = 'new_value'
+ _new_attribute = "new_value"
def setUp(self):
super().setUp()
- self.temp_output = mkdtemp(prefix='pelicantests.')
+ self.temp_output = mkdtemp(prefix="pelicantests.")
def tearDown(self):
super().tearDown()
shutil.rmtree(self.temp_output)
@utils.deprecated_attribute(
- old='_old_attribute', new='_new_attribute',
- since=(3, 1, 0), remove=(4, 1, 3))
+ old="_old_attribute", new="_new_attribute", since=(3, 1, 0), remove=(4, 1, 3)
+ )
def _old_attribute():
return None
@@ -41,69 +45,109 @@ def test_deprecated_attribute(self):
self.assertEqual(value, self._new_attribute)
self.assertLogCountEqual(
count=1,
- msg=('_old_attribute has been deprecated since 3.1.0 and will be '
- 'removed by version 4.1.3. Use _new_attribute instead'),
- level=logging.WARNING)
+ msg=(
+ "_old_attribute has been deprecated since 3.1.0 and will be "
+ "removed by version 4.1.3. Use _new_attribute instead"
+ ),
+ level=logging.WARNING,
+ )
def test_get_date(self):
# valid ones
date = utils.SafeDatetime(year=2012, month=11, day=22)
- date_hour = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11)
+ date_hour = utils.SafeDatetime(year=2012, month=11, day=22, hour=22, minute=11)
date_hour_z = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11,
- tzinfo=timezone.utc)
+ year=2012, month=11, day=22, hour=22, minute=11, tzinfo=timezone.utc
+ )
date_hour_est = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11,
- tzinfo=ZoneInfo("EST"))
+ year=2012, month=11, day=22, hour=22, minute=11, tzinfo=ZoneInfo("EST")
+ )
date_hour_sec = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11, second=10)
+ year=2012, month=11, day=22, hour=22, minute=11, second=10
+ )
date_hour_sec_z = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11, second=10,
- tzinfo=timezone.utc)
+ year=2012,
+ month=11,
+ day=22,
+ hour=22,
+ minute=11,
+ second=10,
+ tzinfo=timezone.utc,
+ )
date_hour_sec_est = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11, second=10,
- tzinfo=ZoneInfo("EST"))
+ year=2012,
+ month=11,
+ day=22,
+ hour=22,
+ minute=11,
+ second=10,
+ tzinfo=ZoneInfo("EST"),
+ )
date_hour_sec_frac_z = utils.SafeDatetime(
- year=2012, month=11, day=22, hour=22, minute=11, second=10,
- microsecond=123000, tzinfo=timezone.utc)
+ year=2012,
+ month=11,
+ day=22,
+ hour=22,
+ minute=11,
+ second=10,
+ microsecond=123000,
+ tzinfo=timezone.utc,
+ )
dates = {
- '2012-11-22': date,
- '2012/11/22': date,
- '2012-11-22 22:11': date_hour,
- '2012/11/22 22:11': date_hour,
- '22-11-2012': date,
- '22/11/2012': date,
- '22.11.2012': date,
- '22.11.2012 22:11': date_hour,
- '2012-11-22T22:11Z': date_hour_z,
- '2012-11-22T22:11-0500': date_hour_est,
- '2012-11-22 22:11:10': date_hour_sec,
- '2012-11-22T22:11:10Z': date_hour_sec_z,
- '2012-11-22T22:11:10-0500': date_hour_sec_est,
- '2012-11-22T22:11:10.123Z': date_hour_sec_frac_z,
+ "2012-11-22": date,
+ "2012/11/22": date,
+ "2012-11-22 22:11": date_hour,
+ "2012/11/22 22:11": date_hour,
+ "22-11-2012": date,
+ "22/11/2012": date,
+ "22.11.2012": date,
+ "22.11.2012 22:11": date_hour,
+ "2012-11-22T22:11Z": date_hour_z,
+ "2012-11-22T22:11-0500": date_hour_est,
+ "2012-11-22 22:11:10": date_hour_sec,
+ "2012-11-22T22:11:10Z": date_hour_sec_z,
+ "2012-11-22T22:11:10-0500": date_hour_sec_est,
+ "2012-11-22T22:11:10.123Z": date_hour_sec_frac_z,
}
# examples from http://www.w3.org/TR/NOTE-datetime
iso_8601_date = utils.SafeDatetime(year=1997, month=7, day=16)
iso_8601_date_hour_tz = utils.SafeDatetime(
- year=1997, month=7, day=16, hour=19, minute=20,
- tzinfo=ZoneInfo("Europe/London"))
+ year=1997,
+ month=7,
+ day=16,
+ hour=19,
+ minute=20,
+ tzinfo=ZoneInfo("Europe/London"),
+ )
iso_8601_date_hour_sec_tz = utils.SafeDatetime(
- year=1997, month=7, day=16, hour=19, minute=20, second=30,
- tzinfo=ZoneInfo("Europe/London"))
+ year=1997,
+ month=7,
+ day=16,
+ hour=19,
+ minute=20,
+ second=30,
+ tzinfo=ZoneInfo("Europe/London"),
+ )
iso_8601_date_hour_sec_ms_tz = utils.SafeDatetime(
- year=1997, month=7, day=16, hour=19, minute=20, second=30,
- microsecond=450000, tzinfo=ZoneInfo("Europe/London"))
+ year=1997,
+ month=7,
+ day=16,
+ hour=19,
+ minute=20,
+ second=30,
+ microsecond=450000,
+ tzinfo=ZoneInfo("Europe/London"),
+ )
iso_8601 = {
- '1997-07-16': iso_8601_date,
- '1997-07-16T19:20+01:00': iso_8601_date_hour_tz,
- '1997-07-16T19:20:30+01:00': iso_8601_date_hour_sec_tz,
- '1997-07-16T19:20:30.45+01:00': iso_8601_date_hour_sec_ms_tz,
+ "1997-07-16": iso_8601_date,
+ "1997-07-16T19:20+01:00": iso_8601_date_hour_tz,
+ "1997-07-16T19:20:30+01:00": iso_8601_date_hour_sec_tz,
+ "1997-07-16T19:20:30.45+01:00": iso_8601_date_hour_sec_ms_tz,
}
# invalid ones
- invalid_dates = ['2010-110-12', 'yay']
+ invalid_dates = ["2010-110-12", "yay"]
for value, expected in dates.items():
self.assertEqual(utils.get_date(value), expected, value)
@@ -115,219 +159,247 @@ def test_get_date(self):
self.assertRaises(ValueError, utils.get_date, item)
def test_slugify(self):
-
- samples = (('this is a test', 'this-is-a-test'),
- ('this is a test', 'this-is-a-test'),
- ('this → is ← a ↑ test', 'this-is-a-test'),
- ('this--is---a test', 'this-is-a-test'),
- ('unicode測試許功蓋,你看到了嗎?',
- 'unicodece-shi-xu-gong-gai-ni-kan-dao-liao-ma'),
- ('大飯原発4号機、18日夜起動へ',
- 'da-fan-yuan-fa-4hao-ji-18ri-ye-qi-dong-he'),)
+ samples = (
+ ("this is a test", "this-is-a-test"),
+ ("this is a test", "this-is-a-test"),
+ ("this → is ← a ↑ test", "this-is-a-test"),
+ ("this--is---a test", "this-is-a-test"),
+ (
+ "unicode測試許功蓋,你看到了嗎?",
+ "unicodece-shi-xu-gong-gai-ni-kan-dao-liao-ma",
+ ),
+ (
+ "大飯原発4号機、18日夜起動へ",
+ "da-fan-yuan-fa-4hao-ji-18ri-ye-qi-dong-he",
+ ),
+ )
settings = read_settings()
- subs = settings['SLUG_REGEX_SUBSTITUTIONS']
+ subs = settings["SLUG_REGEX_SUBSTITUTIONS"]
for value, expected in samples:
self.assertEqual(utils.slugify(value, regex_subs=subs), expected)
- self.assertEqual(utils.slugify('Cat', regex_subs=subs), 'cat')
+ self.assertEqual(utils.slugify("Cat", regex_subs=subs), "cat")
self.assertEqual(
- utils.slugify('Cat', regex_subs=subs, preserve_case=False), 'cat')
+ utils.slugify("Cat", regex_subs=subs, preserve_case=False), "cat"
+ )
self.assertEqual(
- utils.slugify('Cat', regex_subs=subs, preserve_case=True), 'Cat')
+ utils.slugify("Cat", regex_subs=subs, preserve_case=True), "Cat"
+ )
def test_slugify_use_unicode(self):
-
samples = (
- ('this is a test', 'this-is-a-test'),
- ('this is a test', 'this-is-a-test'),
- ('this → is ← a ↑ test', 'this-is-a-test'),
- ('this--is---a test', 'this-is-a-test'),
- ('unicode測試許功蓋,你看到了嗎?', 'unicode測試許功蓋你看到了嗎'),
- ('Çığ', 'çığ')
+ ("this is a test", "this-is-a-test"),
+ ("this is a test", "this-is-a-test"),
+ ("this → is ← a ↑ test", "this-is-a-test"),
+ ("this--is---a test", "this-is-a-test"),
+ ("unicode測試許功蓋,你看到了嗎?", "unicode測試許功蓋你看到了嗎"),
+ ("Çığ", "çığ"),
)
settings = read_settings()
- subs = settings['SLUG_REGEX_SUBSTITUTIONS']
+ subs = settings["SLUG_REGEX_SUBSTITUTIONS"]
for value, expected in samples:
self.assertEqual(
- utils.slugify(value, regex_subs=subs, use_unicode=True),
- expected)
+ utils.slugify(value, regex_subs=subs, use_unicode=True), expected
+ )
# check with preserve case
for value, expected in samples:
self.assertEqual(
- utils.slugify('Çığ', regex_subs=subs,
- preserve_case=True, use_unicode=True),
- 'Çığ')
+ utils.slugify(
+ "Çığ", regex_subs=subs, preserve_case=True, use_unicode=True
+ ),
+ "Çığ",
+ )
# check normalization
samples = (
- ('大飯原発4号機、18日夜起動へ', '大飯原発4号機18日夜起動へ'),
+ ("大飯原発4号機、18日夜起動へ", "大飯原発4号機18日夜起動へ"),
(
- '\N{LATIN SMALL LETTER C}\N{COMBINING CEDILLA}',
- '\N{LATIN SMALL LETTER C WITH CEDILLA}'
- )
+ "\N{LATIN SMALL LETTER C}\N{COMBINING CEDILLA}",
+ "\N{LATIN SMALL LETTER C WITH CEDILLA}",
+ ),
)
for value, expected in samples:
self.assertEqual(
- utils.slugify(value, regex_subs=subs, use_unicode=True),
- expected)
+ utils.slugify(value, regex_subs=subs, use_unicode=True), expected
+ )
def test_slugify_substitute(self):
-
- samples = (('C++ is based on C', 'cpp-is-based-on-c'),
- ('C+++ test C+ test', 'cpp-test-c-test'),
- ('c++, c#, C#, C++', 'cpp-c-sharp-c-sharp-cpp'),
- ('c++-streams', 'cpp-streams'),)
+ samples = (
+ ("C++ is based on C", "cpp-is-based-on-c"),
+ ("C+++ test C+ test", "cpp-test-c-test"),
+ ("c++, c#, C#, C++", "cpp-c-sharp-c-sharp-cpp"),
+ ("c++-streams", "cpp-streams"),
+ )
settings = read_settings()
subs = [
- (r'C\+\+', 'CPP'),
- (r'C#', 'C-SHARP'),
- ] + settings['SLUG_REGEX_SUBSTITUTIONS']
+ (r"C\+\+", "CPP"),
+ (r"C#", "C-SHARP"),
+ ] + settings["SLUG_REGEX_SUBSTITUTIONS"]
for value, expected in samples:
self.assertEqual(utils.slugify(value, regex_subs=subs), expected)
def test_slugify_substitute_and_keeping_non_alphanum(self):
-
- samples = (('Fedora QA', 'fedora.qa'),
- ('C++ is used by Fedora QA', 'cpp is used by fedora.qa'),
- ('C++ is based on C', 'cpp is based on c'),
- ('C+++ test C+ test', 'cpp+ test c+ test'),)
+ samples = (
+ ("Fedora QA", "fedora.qa"),
+ ("C++ is used by Fedora QA", "cpp is used by fedora.qa"),
+ ("C++ is based on C", "cpp is based on c"),
+ ("C+++ test C+ test", "cpp+ test c+ test"),
+ )
subs = [
- (r'Fedora QA', 'fedora.qa'),
- (r'c\+\+', 'cpp'),
+ (r"Fedora QA", "fedora.qa"),
+ (r"c\+\+", "cpp"),
]
for value, expected in samples:
self.assertEqual(utils.slugify(value, regex_subs=subs), expected)
def test_get_relative_path(self):
-
- samples = ((os.path.join('test', 'test.html'), os.pardir),
- (os.path.join('test', 'test', 'test.html'),
- os.path.join(os.pardir, os.pardir)),
- ('test.html', os.curdir),
- (os.path.join('/test', 'test.html'), os.pardir),
- (os.path.join('/test', 'test', 'test.html'),
- os.path.join(os.pardir, os.pardir)),
- ('/test.html', os.curdir),)
+ samples = (
+ (os.path.join("test", "test.html"), os.pardir),
+ (
+ os.path.join("test", "test", "test.html"),
+ os.path.join(os.pardir, os.pardir),
+ ),
+ ("test.html", os.curdir),
+ (os.path.join("/test", "test.html"), os.pardir),
+ (
+ os.path.join("/test", "test", "test.html"),
+ os.path.join(os.pardir, os.pardir),
+ ),
+ ("/test.html", os.curdir),
+ )
for value, expected in samples:
self.assertEqual(utils.get_relative_path(value), expected)
def test_truncate_html_words(self):
# Plain text.
+ self.assertEqual(utils.truncate_html_words("short string", 20), "short string")
self.assertEqual(
- utils.truncate_html_words('short string', 20),
- 'short string')
- self.assertEqual(
- utils.truncate_html_words('word ' * 100, 20),
- 'word ' * 20 + '…')
+ utils.truncate_html_words("word " * 100, 20), "word " * 20 + "…"
+ )
# Plain text with Unicode content.
self.assertEqual(
utils.truncate_html_words(
- '我愿意这样,朋友——我独自远行,不但没有你,\
- 并且再没有别的影在黑暗里。', 12
+ "我愿意这样,朋友——我独自远行,不但没有你,\
+ 并且再没有别的影在黑暗里。",
+ 12,
),
- '我愿意这样,朋友——我独自远行' + ' …')
+ "我愿意这样,朋友——我独自远行" + " …",
+ )
self.assertEqual(
utils.truncate_html_words(
- 'Ты мелькнула, ты предстала, Снова сердце задрожало,', 3
+ "Ты мелькнула, ты предстала, Снова сердце задрожало,", 3
),
- 'Ты мелькнула, ты' + ' …')
+ "Ты мелькнула, ты" + " …",
+ )
self.assertEqual(
- utils.truncate_html_words(
- 'Trong đầm gì đẹp bằng sen', 4
- ),
- 'Trong đầm gì đẹp' + ' …')
+ utils.truncate_html_words("Trong đầm gì đẹp bằng sen", 4),
+ "Trong đầm gì đẹp" + " …",
+ )
# Words enclosed or intervaled by HTML tags.
self.assertEqual(
- utils.truncate_html_words('<p>' + 'word ' * 100 + '</p>', 20),
- '<p>' + 'word ' * 20 + '…</p>')
+ utils.truncate_html_words("<p>" + "word " * 100 + "</p>", 20),
+ "<p>" + "word " * 20 + "…</p>",
+ )
self.assertEqual(
utils.truncate_html_words(
- '<span\nstyle="\n…\n">' + 'word ' * 100 + '</span>', 20),
- '<span\nstyle="\n…\n">' + 'word ' * 20 + '…</span>')
+ '<span\nstyle="\n…\n">' + "word " * 100 + "</span>", 20
+ ),
+ '<span\nstyle="\n…\n">' + "word " * 20 + "…</span>",
+ )
self.assertEqual(
- utils.truncate_html_words('<br>' + 'word ' * 100, 20),
- '<br>' + 'word ' * 20 + '…')
+ utils.truncate_html_words("<br>" + "word " * 100, 20),
+ "<br>" + "word " * 20 + "…",
+ )
self.assertEqual(
- utils.truncate_html_words('<!-- comment -->' + 'word ' * 100, 20),
- '<!-- comment -->' + 'word ' * 20 + '…')
+ utils.truncate_html_words("<!-- comment -->" + "word " * 100, 20),
+ "<!-- comment -->" + "word " * 20 + "…",
+ )
# Words enclosed or intervaled by HTML tags with a custom end
# marker containing HTML tags.
self.assertEqual(
- utils.truncate_html_words('<p>' + 'word ' * 100 + '</p>', 20,
- '<span>marker</span>'),
- '<p>' + 'word ' * 20 + '<span>marker</span></p>')
+ utils.truncate_html_words(
+ "<p>" + "word " * 100 + "</p>", 20, "<span>marker</span>"
+ ),
+ "<p>" + "word " * 20 + "<span>marker</span></p>",
+ )
self.assertEqual(
utils.truncate_html_words(
- '<span\nstyle="\n…\n">' + 'word ' * 100 + '</span>', 20,
- '<span>marker</span>'),
- '<span\nstyle="\n…\n">' + 'word ' * 20 + '<span>marker</span></span>')
+ '<span\nstyle="\n…\n">' + "word " * 100 + "</span>",
+ 20,
+ "<span>marker</span>",
+ ),
+ '<span\nstyle="\n…\n">' + "word " * 20 + "<span>marker</span></span>",
+ )
self.assertEqual(
- utils.truncate_html_words('<br>' + 'word ' * 100, 20,
- '<span>marker</span>'),
- '<br>' + 'word ' * 20 + '<span>marker</span>')
+ utils.truncate_html_words(
+ "<br>" + "word " * 100, 20, "<span>marker</span>"
+ ),
+ "<br>" + "word " * 20 + "<span>marker</span>",
+ )
self.assertEqual(
- utils.truncate_html_words('<!-- comment -->' + 'word ' * 100, 20,
- '<span>marker</span>'),
- '<!-- comment -->' + 'word ' * 20 + '<span>marker</span>')
+ utils.truncate_html_words(
+ "<!-- comment -->" + "word " * 100, 20, "<span>marker</span>"
+ ),
+ "<!-- comment -->" + "word " * 20 + "<span>marker</span>",
+ )
# Words with hypens and apostrophes.
+ self.assertEqual(utils.truncate_html_words("a-b " * 100, 20), "a-b " * 20 + "…")
self.assertEqual(
- utils.truncate_html_words("a-b " * 100, 20),
- "a-b " * 20 + '…')
- self.assertEqual(
- utils.truncate_html_words("it's " * 100, 20),
- "it's " * 20 + '…')
+ utils.truncate_html_words("it's " * 100, 20), "it's " * 20 + "…"
+ )
# Words with HTML entity references.
self.assertEqual(
- utils.truncate_html_words("é " * 100, 20),
- "é " * 20 + '…')
+ utils.truncate_html_words("é " * 100, 20), "é " * 20 + "…"
+ )
self.assertEqual(
utils.truncate_html_words("café " * 100, 20),
- "café " * 20 + '…')
+ "café " * 20 + "…",
+ )
self.assertEqual(
utils.truncate_html_words("èlite " * 100, 20),
- "èlite " * 20 + '…')
+ "èlite " * 20 + "…",
+ )
self.assertEqual(
utils.truncate_html_words("cafetiére " * 100, 20),
- "cafetiére " * 20 + '…')
+ "cafetiére " * 20 + "…",
+ )
self.assertEqual(
- utils.truncate_html_words("∫dx " * 100, 20),
- "∫dx " * 20 + '…')
+ utils.truncate_html_words("∫dx " * 100, 20), "∫dx " * 20 + "…"
+ )
# Words with HTML character references inside and outside
# the ASCII range.
self.assertEqual(
- utils.truncate_html_words("é " * 100, 20),
- "é " * 20 + '…')
+ utils.truncate_html_words("é " * 100, 20), "é " * 20 + "…"
+ )
self.assertEqual(
- utils.truncate_html_words("∫dx " * 100, 20),
- "∫dx " * 20 + '…')
+ utils.truncate_html_words("∫dx " * 100, 20), "∫dx " * 20 + "…"
+ )
# Words with invalid or broken HTML references.
+ self.assertEqual(utils.truncate_html_words("&invalid;", 20), "&invalid;")
self.assertEqual(
- utils.truncate_html_words('&invalid;', 20), '&invalid;')
- self.assertEqual(
- utils.truncate_html_words('�', 20), '�')
- self.assertEqual(
- utils.truncate_html_words('�', 20), '�')
- self.assertEqual(
- utils.truncate_html_words('&mdash text', 20), '&mdash text')
- self.assertEqual(
- utils.truncate_html_words('Ӓ text', 20), 'Ӓ text')
+ utils.truncate_html_words("�", 20), "�"
+ )
self.assertEqual(
- utils.truncate_html_words('઼ text', 20), '઼ text')
+ utils.truncate_html_words("�", 20), "�"
+ )
+ self.assertEqual(utils.truncate_html_words("&mdash text", 20), "&mdash text")
+ self.assertEqual(utils.truncate_html_words("Ӓ text", 20), "Ӓ text")
+ self.assertEqual(utils.truncate_html_words("઼ text", 20), "઼ text")
def test_process_translations(self):
fr_articles = []
@@ -335,65 +407,135 @@ def test_process_translations(self):
# create a bunch of articles
# 0: no translation metadata
- fr_articles.append(get_article(lang='fr', slug='yay0', title='Titre',
- content='en français'))
- en_articles.append(get_article(lang='en', slug='yay0', title='Title',
- content='in english'))
+ fr_articles.append(
+ get_article(lang="fr", slug="yay0", title="Titre", content="en français")
+ )
+ en_articles.append(
+ get_article(lang="en", slug="yay0", title="Title", content="in english")
+ )
# 1: translation metadata on default lang
- fr_articles.append(get_article(lang='fr', slug='yay1', title='Titre',
- content='en français'))
- en_articles.append(get_article(lang='en', slug='yay1', title='Title',
- content='in english',
- translation='true'))
+ fr_articles.append(
+ get_article(lang="fr", slug="yay1", title="Titre", content="en français")
+ )
+ en_articles.append(
+ get_article(
+ lang="en",
+ slug="yay1",
+ title="Title",
+ content="in english",
+ translation="true",
+ )
+ )
# 2: translation metadata not on default lang
- fr_articles.append(get_article(lang='fr', slug='yay2', title='Titre',
- content='en français',
- translation='true'))
- en_articles.append(get_article(lang='en', slug='yay2', title='Title',
- content='in english'))
+ fr_articles.append(
+ get_article(
+ lang="fr",
+ slug="yay2",
+ title="Titre",
+ content="en français",
+ translation="true",
+ )
+ )
+ en_articles.append(
+ get_article(lang="en", slug="yay2", title="Title", content="in english")
+ )
# 3: back to default language detection if all items have the
# translation metadata
- fr_articles.append(get_article(lang='fr', slug='yay3', title='Titre',
- content='en français',
- translation='yep'))
- en_articles.append(get_article(lang='en', slug='yay3', title='Title',
- content='in english',
- translation='yes'))
+ fr_articles.append(
+ get_article(
+ lang="fr",
+ slug="yay3",
+ title="Titre",
+ content="en français",
+ translation="yep",
+ )
+ )
+ en_articles.append(
+ get_article(
+ lang="en",
+ slug="yay3",
+ title="Title",
+ content="in english",
+ translation="yes",
+ )
+ )
# 4-5: translation pairs with the same slug but different category
- fr_articles.append(get_article(lang='fr', slug='yay4', title='Titre',
- content='en français', category='foo'))
- en_articles.append(get_article(lang='en', slug='yay4', title='Title',
- content='in english', category='foo'))
- fr_articles.append(get_article(lang='fr', slug='yay4', title='Titre',
- content='en français', category='bar'))
- en_articles.append(get_article(lang='en', slug='yay4', title='Title',
- content='in english', category='bar'))
+ fr_articles.append(
+ get_article(
+ lang="fr",
+ slug="yay4",
+ title="Titre",
+ content="en français",
+ category="foo",
+ )
+ )
+ en_articles.append(
+ get_article(
+ lang="en",
+ slug="yay4",
+ title="Title",
+ content="in english",
+ category="foo",
+ )
+ )
+ fr_articles.append(
+ get_article(
+ lang="fr",
+ slug="yay4",
+ title="Titre",
+ content="en français",
+ category="bar",
+ )
+ )
+ en_articles.append(
+ get_article(
+ lang="en",
+ slug="yay4",
+ title="Title",
+ content="in english",
+ category="bar",
+ )
+ )
# try adding articles in both orders
- for lang0_articles, lang1_articles in ((fr_articles, en_articles),
- (en_articles, fr_articles)):
+ for lang0_articles, lang1_articles in (
+ (fr_articles, en_articles),
+ (en_articles, fr_articles),
+ ):
articles = lang0_articles + lang1_articles
# test process_translations with falsy translation_id
- index, trans = utils.process_translations(
- articles, translation_id=None)
+ index, trans = utils.process_translations(articles, translation_id=None)
for i in range(6):
for lang_articles in [en_articles, fr_articles]:
self.assertIn(lang_articles[i], index)
self.assertNotIn(lang_articles[i], trans)
# test process_translations with simple and complex translation_id
- for translation_id in ['slug', {'slug', 'category'}]:
+ for translation_id in ["slug", {"slug", "category"}]:
index, trans = utils.process_translations(
- articles, translation_id=translation_id)
-
- for a in [en_articles[0], fr_articles[1], en_articles[2],
- en_articles[3], en_articles[4], en_articles[5]]:
+ articles, translation_id=translation_id
+ )
+
+ for a in [
+ en_articles[0],
+ fr_articles[1],
+ en_articles[2],
+ en_articles[3],
+ en_articles[4],
+ en_articles[5],
+ ]:
self.assertIn(a, index)
self.assertNotIn(a, trans)
- for a in [fr_articles[0], en_articles[1], fr_articles[2],
- fr_articles[3], fr_articles[4], fr_articles[5]]:
+ for a in [
+ fr_articles[0],
+ en_articles[1],
+ fr_articles[2],
+ fr_articles[3],
+ fr_articles[4],
+ fr_articles[5],
+ ]:
self.assertIn(a, trans)
self.assertNotIn(a, index)
@@ -403,18 +545,17 @@ def test_process_translations(self):
for a_arts in [en_articles, fr_articles]:
for b_arts in [en_articles, fr_articles]:
- if translation_id == 'slug':
+ if translation_id == "slug":
self.assertIn(a_arts[4], b_arts[5].translations)
self.assertIn(a_arts[5], b_arts[4].translations)
- elif translation_id == {'slug', 'category'}:
+ elif translation_id == {"slug", "category"}:
self.assertNotIn(a_arts[4], b_arts[5].translations)
self.assertNotIn(a_arts[5], b_arts[4].translations)
def test_clean_output_dir(self):
retention = ()
- test_directory = os.path.join(self.temp_output,
- 'clean_output')
- content = os.path.join(os.path.dirname(__file__), 'content')
+ test_directory = os.path.join(self.temp_output, "clean_output")
+ content = os.path.join(os.path.dirname(__file__), "content")
shutil.copytree(content, test_directory)
utils.clean_output_dir(test_directory, retention)
self.assertTrue(os.path.isdir(test_directory))
@@ -423,17 +564,15 @@ def test_clean_output_dir(self):
def test_clean_output_dir_not_there(self):
retention = ()
- test_directory = os.path.join(self.temp_output,
- 'does_not_exist')
+ test_directory = os.path.join(self.temp_output, "does_not_exist")
utils.clean_output_dir(test_directory, retention)
self.assertFalse(os.path.exists(test_directory))
def test_clean_output_dir_is_file(self):
retention = ()
- test_directory = os.path.join(self.temp_output,
- 'this_is_a_file')
- f = open(test_directory, 'w')
- f.write('')
+ test_directory = os.path.join(self.temp_output, "this_is_a_file")
+ f = open(test_directory, "w")
+ f.write("")
f.close()
utils.clean_output_dir(test_directory, retention)
self.assertFalse(os.path.exists(test_directory))
@@ -442,223 +581,230 @@ def test_strftime(self):
d = utils.SafeDatetime(2012, 8, 29)
# simple formatting
- self.assertEqual(utils.strftime(d, '%d/%m/%y'), '29/08/12')
- self.assertEqual(utils.strftime(d, '%d/%m/%Y'), '29/08/2012')
+ self.assertEqual(utils.strftime(d, "%d/%m/%y"), "29/08/12")
+ self.assertEqual(utils.strftime(d, "%d/%m/%Y"), "29/08/2012")
# RFC 3339
self.assertEqual(
- utils.strftime(d, '%Y-%m-%dT%H:%M:%SZ'),
- '2012-08-29T00:00:00Z')
+ utils.strftime(d, "%Y-%m-%dT%H:%M:%SZ"), "2012-08-29T00:00:00Z"
+ )
# % escaped
- self.assertEqual(utils.strftime(d, '%d%%%m%%%y'), '29%08%12')
- self.assertEqual(utils.strftime(d, '%d %% %m %% %y'), '29 % 08 % 12')
+ self.assertEqual(utils.strftime(d, "%d%%%m%%%y"), "29%08%12")
+ self.assertEqual(utils.strftime(d, "%d %% %m %% %y"), "29 % 08 % 12")
# not valid % formatter
- self.assertEqual(utils.strftime(d, '10% reduction in %Y'),
- '10% reduction in 2012')
- self.assertEqual(utils.strftime(d, '%10 reduction in %Y'),
- '%10 reduction in 2012')
+ self.assertEqual(
+ utils.strftime(d, "10% reduction in %Y"), "10% reduction in 2012"
+ )
+ self.assertEqual(
+ utils.strftime(d, "%10 reduction in %Y"), "%10 reduction in 2012"
+ )
# with text
- self.assertEqual(utils.strftime(d, 'Published in %d-%m-%Y'),
- 'Published in 29-08-2012')
+ self.assertEqual(
+ utils.strftime(d, "Published in %d-%m-%Y"), "Published in 29-08-2012"
+ )
# with non-ascii text
self.assertEqual(
- utils.strftime(d, '%d/%m/%Y Øl trinken beim Besäufnis'),
- '29/08/2012 Øl trinken beim Besäufnis')
+ utils.strftime(d, "%d/%m/%Y Øl trinken beim Besäufnis"),
+ "29/08/2012 Øl trinken beim Besäufnis",
+ )
# alternative formatting options
- self.assertEqual(utils.strftime(d, '%-d/%-m/%y'), '29/8/12')
- self.assertEqual(utils.strftime(d, '%-H:%-M:%-S'), '0:0:0')
+ self.assertEqual(utils.strftime(d, "%-d/%-m/%y"), "29/8/12")
+ self.assertEqual(utils.strftime(d, "%-H:%-M:%-S"), "0:0:0")
d = utils.SafeDatetime(2012, 8, 9)
- self.assertEqual(utils.strftime(d, '%-d/%-m/%y'), '9/8/12')
+ self.assertEqual(utils.strftime(d, "%-d/%-m/%y"), "9/8/12")
d = utils.SafeDatetime(2021, 1, 8)
- self.assertEqual(utils.strftime(d, '%G - %-V - %u'), '2021 - 1 - 5')
+ self.assertEqual(utils.strftime(d, "%G - %-V - %u"), "2021 - 1 - 5")
# test the output of utils.strftime in a different locale
# Turkish locale
- @unittest.skipUnless(locale_available('tr_TR.UTF-8') or
- locale_available('Turkish'),
- 'Turkish locale needed')
+ @unittest.skipUnless(
+ locale_available("tr_TR.UTF-8") or locale_available("Turkish"),
+ "Turkish locale needed",
+ )
def test_strftime_locale_dependent_turkish(self):
- temp_locale = 'Turkish' if platform == 'win32' else 'tr_TR.UTF-8'
+ temp_locale = "Turkish" if platform == "win32" else "tr_TR.UTF-8"
with utils.temporary_locale(temp_locale):
d = utils.SafeDatetime(2012, 8, 29)
# simple
- self.assertEqual(utils.strftime(d, '%d %B %Y'), '29 Ağustos 2012')
- self.assertEqual(utils.strftime(d, '%A, %d %B %Y'),
- 'Çarşamba, 29 Ağustos 2012')
+ self.assertEqual(utils.strftime(d, "%d %B %Y"), "29 Ağustos 2012")
+ self.assertEqual(
+ utils.strftime(d, "%A, %d %B %Y"), "Çarşamba, 29 Ağustos 2012"
+ )
# with text
self.assertEqual(
- utils.strftime(d, 'Yayınlanma tarihi: %A, %d %B %Y'),
- 'Yayınlanma tarihi: Çarşamba, 29 Ağustos 2012')
+ utils.strftime(d, "Yayınlanma tarihi: %A, %d %B %Y"),
+ "Yayınlanma tarihi: Çarşamba, 29 Ağustos 2012",
+ )
# non-ascii format candidate (someone might pass it… for some reason)
self.assertEqual(
- utils.strftime(d, '%Y yılında %üretim artışı'),
- '2012 yılında %üretim artışı')
+ utils.strftime(d, "%Y yılında %üretim artışı"),
+ "2012 yılında %üretim artışı",
+ )
# test the output of utils.strftime in a different locale
# French locale
- @unittest.skipUnless(locale_available('fr_FR.UTF-8') or
- locale_available('French'),
- 'French locale needed')
+ @unittest.skipUnless(
+ locale_available("fr_FR.UTF-8") or locale_available("French"),
+ "French locale needed",
+ )
def test_strftime_locale_dependent_french(self):
- temp_locale = 'French' if platform == 'win32' else 'fr_FR.UTF-8'
+ temp_locale = "French" if platform == "win32" else "fr_FR.UTF-8"
with utils.temporary_locale(temp_locale):
d = utils.SafeDatetime(2012, 8, 29)
# simple
- self.assertEqual(utils.strftime(d, '%d %B %Y'), '29 août 2012')
+ self.assertEqual(utils.strftime(d, "%d %B %Y"), "29 août 2012")
# depending on OS, the first letter is m or M
- self.assertTrue(utils.strftime(d, '%A') in ('mercredi', 'Mercredi'))
+ self.assertTrue(utils.strftime(d, "%A") in ("mercredi", "Mercredi"))
# with text
self.assertEqual(
- utils.strftime(d, 'Écrit le %d %B %Y'),
- 'Écrit le 29 août 2012')
+ utils.strftime(d, "Écrit le %d %B %Y"), "Écrit le 29 août 2012"
+ )
# non-ascii format candidate (someone might pass it… for some reason)
- self.assertEqual(
- utils.strftime(d, '%écrits en %Y'),
- '%écrits en 2012')
+ self.assertEqual(utils.strftime(d, "%écrits en %Y"), "%écrits en 2012")
def test_maybe_pluralize(self):
- self.assertEqual(
- utils.maybe_pluralize(0, 'Article', 'Articles'),
- '0 Articles')
- self.assertEqual(
- utils.maybe_pluralize(1, 'Article', 'Articles'),
- '1 Article')
- self.assertEqual(
- utils.maybe_pluralize(2, 'Article', 'Articles'),
- '2 Articles')
+ self.assertEqual(utils.maybe_pluralize(0, "Article", "Articles"), "0 Articles")
+ self.assertEqual(utils.maybe_pluralize(1, "Article", "Articles"), "1 Article")
+ self.assertEqual(utils.maybe_pluralize(2, "Article", "Articles"), "2 Articles")
def test_temporary_locale(self):
# test with default LC category
orig_locale = locale.setlocale(locale.LC_ALL)
- with utils.temporary_locale('C'):
- self.assertEqual(locale.setlocale(locale.LC_ALL), 'C')
+ with utils.temporary_locale("C"):
+ self.assertEqual(locale.setlocale(locale.LC_ALL), "C")
self.assertEqual(locale.setlocale(locale.LC_ALL), orig_locale)
# test with custom LC category
orig_locale = locale.setlocale(locale.LC_TIME)
- with utils.temporary_locale('C', locale.LC_TIME):
- self.assertEqual(locale.setlocale(locale.LC_TIME), 'C')
+ with utils.temporary_locale("C", locale.LC_TIME):
+ self.assertEqual(locale.setlocale(locale.LC_TIME), "C")
self.assertEqual(locale.setlocale(locale.LC_TIME), orig_locale)
class TestCopy(unittest.TestCase):
- '''Tests the copy utility'''
+ """Tests the copy utility"""
def setUp(self):
- self.root_dir = mkdtemp(prefix='pelicantests.')
+ self.root_dir = mkdtemp(prefix="pelicantests.")
self.old_locale = locale.setlocale(locale.LC_ALL)
- locale.setlocale(locale.LC_ALL, 'C')
+ locale.setlocale(locale.LC_ALL, "C")
def tearDown(self):
shutil.rmtree(self.root_dir)
locale.setlocale(locale.LC_ALL, self.old_locale)
def _create_file(self, *path):
- with open(os.path.join(self.root_dir, *path), 'w') as f:
- f.write('42\n')
+ with open(os.path.join(self.root_dir, *path), "w") as f:
+ f.write("42\n")
def _create_dir(self, *path):
os.makedirs(os.path.join(self.root_dir, *path))
def _exist_file(self, *path):
path = os.path.join(self.root_dir, *path)
- self.assertTrue(os.path.isfile(path), 'File does not exist: %s' % path)
+ self.assertTrue(os.path.isfile(path), "File does not exist: %s" % path)
def _exist_dir(self, *path):
path = os.path.join(self.root_dir, *path)
- self.assertTrue(os.path.exists(path),
- 'Directory does not exist: %s' % path)
+ self.assertTrue(os.path.exists(path), "Directory does not exist: %s" % path)
def test_copy_file_same_path(self):
- self._create_file('a.txt')
- utils.copy(os.path.join(self.root_dir, 'a.txt'),
- os.path.join(self.root_dir, 'b.txt'))
- self._exist_file('b.txt')
+ self._create_file("a.txt")
+ utils.copy(
+ os.path.join(self.root_dir, "a.txt"), os.path.join(self.root_dir, "b.txt")
+ )
+ self._exist_file("b.txt")
def test_copy_file_different_path(self):
- self._create_dir('a')
- self._create_dir('b')
- self._create_file('a', 'a.txt')
- utils.copy(os.path.join(self.root_dir, 'a', 'a.txt'),
- os.path.join(self.root_dir, 'b', 'b.txt'))
- self._exist_dir('b')
- self._exist_file('b', 'b.txt')
+ self._create_dir("a")
+ self._create_dir("b")
+ self._create_file("a", "a.txt")
+ utils.copy(
+ os.path.join(self.root_dir, "a", "a.txt"),
+ os.path.join(self.root_dir, "b", "b.txt"),
+ )
+ self._exist_dir("b")
+ self._exist_file("b", "b.txt")
def test_copy_file_create_dirs(self):
- self._create_file('a.txt')
+ self._create_file("a.txt")
utils.copy(
- os.path.join(self.root_dir, 'a.txt'),
- os.path.join(self.root_dir, 'b0', 'b1', 'b2', 'b3', 'b.txt'))
- self._exist_dir('b0')
- self._exist_dir('b0', 'b1')
- self._exist_dir('b0', 'b1', 'b2')
- self._exist_dir('b0', 'b1', 'b2', 'b3')
- self._exist_file('b0', 'b1', 'b2', 'b3', 'b.txt')
+ os.path.join(self.root_dir, "a.txt"),
+ os.path.join(self.root_dir, "b0", "b1", "b2", "b3", "b.txt"),
+ )
+ self._exist_dir("b0")
+ self._exist_dir("b0", "b1")
+ self._exist_dir("b0", "b1", "b2")
+ self._exist_dir("b0", "b1", "b2", "b3")
+ self._exist_file("b0", "b1", "b2", "b3", "b.txt")
def test_copy_dir_same_path(self):
- self._create_dir('a')
- self._create_file('a', 'a.txt')
- utils.copy(os.path.join(self.root_dir, 'a'),
- os.path.join(self.root_dir, 'b'))
- self._exist_dir('b')
- self._exist_file('b', 'a.txt')
+ self._create_dir("a")
+ self._create_file("a", "a.txt")
+ utils.copy(os.path.join(self.root_dir, "a"), os.path.join(self.root_dir, "b"))
+ self._exist_dir("b")
+ self._exist_file("b", "a.txt")
def test_copy_dir_different_path(self):
- self._create_dir('a0')
- self._create_dir('a0', 'a1')
- self._create_file('a0', 'a1', 'a.txt')
- self._create_dir('b0')
- utils.copy(os.path.join(self.root_dir, 'a0', 'a1'),
- os.path.join(self.root_dir, 'b0', 'b1'))
- self._exist_dir('b0', 'b1')
- self._exist_file('b0', 'b1', 'a.txt')
+ self._create_dir("a0")
+ self._create_dir("a0", "a1")
+ self._create_file("a0", "a1", "a.txt")
+ self._create_dir("b0")
+ utils.copy(
+ os.path.join(self.root_dir, "a0", "a1"),
+ os.path.join(self.root_dir, "b0", "b1"),
+ )
+ self._exist_dir("b0", "b1")
+ self._exist_file("b0", "b1", "a.txt")
def test_copy_dir_create_dirs(self):
- self._create_dir('a')
- self._create_file('a', 'a.txt')
- utils.copy(os.path.join(self.root_dir, 'a'),
- os.path.join(self.root_dir, 'b0', 'b1', 'b2', 'b3', 'b'))
- self._exist_dir('b0')
- self._exist_dir('b0', 'b1')
- self._exist_dir('b0', 'b1', 'b2')
- self._exist_dir('b0', 'b1', 'b2', 'b3')
- self._exist_dir('b0', 'b1', 'b2', 'b3', 'b')
- self._exist_file('b0', 'b1', 'b2', 'b3', 'b', 'a.txt')
+ self._create_dir("a")
+ self._create_file("a", "a.txt")
+ utils.copy(
+ os.path.join(self.root_dir, "a"),
+ os.path.join(self.root_dir, "b0", "b1", "b2", "b3", "b"),
+ )
+ self._exist_dir("b0")
+ self._exist_dir("b0", "b1")
+ self._exist_dir("b0", "b1", "b2")
+ self._exist_dir("b0", "b1", "b2", "b3")
+ self._exist_dir("b0", "b1", "b2", "b3", "b")
+ self._exist_file("b0", "b1", "b2", "b3", "b", "a.txt")
class TestDateFormatter(unittest.TestCase):
- '''Tests that the output of DateFormatter jinja filter is same as
- utils.strftime'''
+ """Tests that the output of DateFormatter jinja filter is same as
+ utils.strftime"""
def setUp(self):
# prepare a temp content and output folder
- self.temp_content = mkdtemp(prefix='pelicantests.')
- self.temp_output = mkdtemp(prefix='pelicantests.')
+ self.temp_content = mkdtemp(prefix="pelicantests.")
+ self.temp_output = mkdtemp(prefix="pelicantests.")
# prepare a template file
- template_dir = os.path.join(self.temp_content, 'template')
- template_path = os.path.join(template_dir, 'source.html')
+ template_dir = os.path.join(self.temp_content, "template")
+ template_path = os.path.join(template_dir, "source.html")
os.makedirs(template_dir)
- with open(template_path, 'w') as template_file:
+ with open(template_path, "w") as template_file:
template_file.write('date = {{ date|strftime("%A, %d %B %Y") }}')
self.date = utils.SafeDatetime(2012, 8, 29)
@@ -666,136 +812,128 @@ def tearDown(self):
shutil.rmtree(self.temp_content)
shutil.rmtree(self.temp_output)
# reset locale to default
- locale.setlocale(locale.LC_ALL, '')
+ locale.setlocale(locale.LC_ALL, "")
- @unittest.skipUnless(locale_available('fr_FR.UTF-8') or
- locale_available('French'),
- 'French locale needed')
+ @unittest.skipUnless(
+ locale_available("fr_FR.UTF-8") or locale_available("French"),
+ "French locale needed",
+ )
def test_french_strftime(self):
# This test tries to reproduce an issue that
# occurred with python3.3 under macos10 only
- temp_locale = 'French' if platform == 'win32' else 'fr_FR.UTF-8'
+ temp_locale = "French" if platform == "win32" else "fr_FR.UTF-8"
with utils.temporary_locale(temp_locale):
date = utils.SafeDatetime(2014, 8, 14)
# we compare the lower() dates since macos10 returns
# "Jeudi" for %A whereas linux reports "jeudi"
self.assertEqual(
- 'jeudi, 14 août 2014',
- utils.strftime(date, date_format="%A, %d %B %Y").lower())
+ "jeudi, 14 août 2014",
+ utils.strftime(date, date_format="%A, %d %B %Y").lower(),
+ )
df = utils.DateFormatter()
self.assertEqual(
- 'jeudi, 14 août 2014',
- df(date, date_format="%A, %d %B %Y").lower())
+ "jeudi, 14 août 2014", df(date, date_format="%A, %d %B %Y").lower()
+ )
# Let us now set the global locale to C:
- with utils.temporary_locale('C'):
+ with utils.temporary_locale("C"):
# DateFormatter should still work as expected
# since it is the whole point of DateFormatter
# (This is where pre-2014/4/15 code fails on macos10)
df_date = df(date, date_format="%A, %d %B %Y").lower()
- self.assertEqual('jeudi, 14 août 2014', df_date)
+ self.assertEqual("jeudi, 14 août 2014", df_date)
- @unittest.skipUnless(locale_available('fr_FR.UTF-8') or
- locale_available('French'),
- 'French locale needed')
+ @unittest.skipUnless(
+ locale_available("fr_FR.UTF-8") or locale_available("French"),
+ "French locale needed",
+ )
def test_french_locale(self):
- if platform == 'win32':
- locale_string = 'French'
+ if platform == "win32":
+ locale_string = "French"
else:
- locale_string = 'fr_FR.UTF-8'
+ locale_string = "fr_FR.UTF-8"
settings = read_settings(
override={
- 'LOCALE': locale_string,
- 'TEMPLATE_PAGES': {
- 'template/source.html': 'generated/file.html'
- }
- })
+ "LOCALE": locale_string,
+ "TEMPLATE_PAGES": {"template/source.html": "generated/file.html"},
+ }
+ )
generator = TemplatePagesGenerator(
- {'date': self.date}, settings,
- self.temp_content, '', self.temp_output)
- generator.env.filters.update({'strftime': utils.DateFormatter()})
+ {"date": self.date}, settings, self.temp_content, "", self.temp_output
+ )
+ generator.env.filters.update({"strftime": utils.DateFormatter()})
writer = Writer(self.temp_output, settings=settings)
generator.generate_output(writer)
- output_path = os.path.join(
- self.temp_output, 'generated', 'file.html')
+ output_path = os.path.join(self.temp_output, "generated", "file.html")
# output file has been generated
self.assertTrue(os.path.exists(output_path))
# output content is correct
with utils.pelican_open(output_path) as output_file:
- self.assertEqual(output_file,
- utils.strftime(self.date, 'date = %A, %d %B %Y'))
+ self.assertEqual(
+ output_file, utils.strftime(self.date, "date = %A, %d %B %Y")
+ )
- @unittest.skipUnless(locale_available('tr_TR.UTF-8') or
- locale_available('Turkish'),
- 'Turkish locale needed')
+ @unittest.skipUnless(
+ locale_available("tr_TR.UTF-8") or locale_available("Turkish"),
+ "Turkish locale needed",
+ )
def test_turkish_locale(self):
- if platform == 'win32':
- locale_string = 'Turkish'
+ if platform == "win32":
+ locale_string = "Turkish"
else:
- locale_string = 'tr_TR.UTF-8'
+ locale_string = "tr_TR.UTF-8"
settings = read_settings(
override={
- 'LOCALE': locale_string,
- 'TEMPLATE_PAGES': {
- 'template/source.html': 'generated/file.html'
- }
- })
+ "LOCALE": locale_string,
+ "TEMPLATE_PAGES": {"template/source.html": "generated/file.html"},
+ }
+ )
generator = TemplatePagesGenerator(
- {'date': self.date}, settings,
- self.temp_content, '', self.temp_output)
- generator.env.filters.update({'strftime': utils.DateFormatter()})
+ {"date": self.date}, settings, self.temp_content, "", self.temp_output
+ )
+ generator.env.filters.update({"strftime": utils.DateFormatter()})
writer = Writer(self.temp_output, settings=settings)
generator.generate_output(writer)
- output_path = os.path.join(
- self.temp_output, 'generated', 'file.html')
+ output_path = os.path.join(self.temp_output, "generated", "file.html")
# output file has been generated
self.assertTrue(os.path.exists(output_path))
# output content is correct
with utils.pelican_open(output_path) as output_file:
- self.assertEqual(output_file,
- utils.strftime(self.date, 'date = %A, %d %B %Y'))
+ self.assertEqual(
+ output_file, utils.strftime(self.date, "date = %A, %d %B %Y")
+ )
class TestSanitisedJoin(unittest.TestCase):
def test_detect_parent_breakout(self):
with self.assertRaisesRegex(
- RuntimeError,
- "Attempted to break out of output directory to "
- "(.*?:)?/foo/test"): # (.*?:)? accounts for Windows root
- utils.sanitised_join(
- "/foo/bar",
- "../test"
- )
+ RuntimeError,
+ "Attempted to break out of output directory to " "(.*?:)?/foo/test",
+ ): # (.*?:)? accounts for Windows root
+ utils.sanitised_join("/foo/bar", "../test")
def test_detect_root_breakout(self):
with self.assertRaisesRegex(
- RuntimeError,
- "Attempted to break out of output directory to "
- "(.*?:)?/test"): # (.*?:)? accounts for Windows root
- utils.sanitised_join(
- "/foo/bar",
- "/test"
- )
+ RuntimeError,
+ "Attempted to break out of output directory to " "(.*?:)?/test",
+ ): # (.*?:)? accounts for Windows root
+ utils.sanitised_join("/foo/bar", "/test")
def test_pass_deep_subpaths(self):
self.assertEqual(
- utils.sanitised_join(
- "/foo/bar",
- "test"
- ),
- utils.posixize_path(
- os.path.abspath(os.path.join("/foo/bar", "test")))
+ utils.sanitised_join("/foo/bar", "test"),
+ utils.posixize_path(os.path.abspath(os.path.join("/foo/bar", "test"))),
)
@@ -812,7 +950,7 @@ def get(self, key):
container = Container()
with unittest.mock.patch.object(
- container, "_get", side_effect=lambda x: x
+ container, "_get", side_effect=lambda x: x
) as get_mock:
self.assertEqual("foo", container.get("foo"))
get_mock.assert_called_once_with("foo")
| Format all
Resolves: #3216
# Pull Request Checklist
- [X] Ensured **tests pass** and (if applicable) updated functional test output
- [X] Conformed to **code style guidelines** by running appropriate linting tools
Once this PR is _mostly_ approved, I will add a `.git-blame-ignore-revs` file with the revision of the bulk format commit.
After this, pre-commit should pass. This would supersede #3217 since that's included in here as well.
Replace Flake8 & isort with Ruff
## `invoke lint` yields error with flake8 6.0.0
I just tried to run `invoke lint` on the code. This resulted in `flake8` not being installed first (not installed by `invoke install`), followed by:
> flake8: error: unrecognized arguments: --diff
This is related to flake8 6.0.0 dropping the `--diff` option, see https://flake8.pycqa.org/en/latest/release-notes/6.0.0.html#backwards-incompatible-changes. Using `pip install "flake8<6.0.0"` works, but the functionality should probably be fixed to not rely on old versions any more.
|
Hi @FriedrichFroebel. The proper version of Flake8 for use with this repository is listed in the [pyproject.toml file](https://github.com/getpelican/pelican/blob/86f62d0a92ad78df36aac9f5837d4f2715535421/pyproject.toml#L61).
As for this bit…
> This is related to flake8 6.0.0 dropping the `--diff` option
Speaking of dropping, I think it's time to drop Flake8. I'm tired of these shenanigans and the repeatedly rude manner in which Flake8 maintainers conduct themselves. (I'm going to refrain from posting relevant links here, as I have neither the time nor the interest in getting mired in pointless Internet wars.)
So, Friedrich, I'm sorry for hijacking your issue, but I have changed the intended resolution of this issue accordingly. I have already done an experiment in a plugin repo in which I replaced Flake8 (and isort) with Ruff, and it seems to be working very well so far: https://github.com/pelican-plugins/touch/pull/2/commits/f93876da3b8c30af4aa6c869349e995c0b2d0fbd
I intend to make a similar transition here in this repository when I can find the time.
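For readers who have not used it, here is a minimal sketch of what running Ruff in place of Flake8 and isort can look like; the rule selection is purely illustrative and not Pelican's actual configuration:

```bash
# Illustrative only; the real rule set would live in pyproject.toml.
# E/F roughly cover the pycodestyle/pyflakes checks Flake8 ran,
# and I enables the import-sorting rules that stand in for isort.
ruff check --select E,F,I --fix .
```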
Thanks for the explanations. I had missed the versions inside the `pyproject.toml` file, as I assumed that the `tasks.py` file would install the required stuff (which proved to be wrong). The CONTRIBUTING guide was not much help here either.
> This resulted in `flake8` not being installed first (not installed by `invoke install`)…
There is no `invoke install` task. If you tried that, Invoke should have rightfully responded with: `No idea what 'install' is!`
> I assumed that the `tasks.py` file would install the required stuff (which proved to be wrong).
For me, it does indeed install the required stuff, including all needed linter tools, as long as the proper invocation is used:
invoke setup
> The CONTRIBUTING guide was not much help here either.
All of the required steps should be meticulously described in the [Setting Up the Development Environment](https://docs.getpelican.com/en/latest/contribute.html#setting-up-the-development-environment) section of the documentation, including the above-mentioned `invoke setup` step.
Thanks for the hint. I actually assumed that https://github.com/getpelican/pelican/blob/master/CONTRIBUTING.rst would be the official contribution guide, as this is what GitHub automatically points to as well. Nevertheless, I managed to get it working somehow, but yes, good documentation is always appreciated. | 2023-10-29T21:27:30Z | [] | [] |
Chia-Network/chia-blockchain | 8,279 | Chia-Network__chia-blockchain-8279 | [
"8262"
] | 745ad49f7d3881e1e8673decc51f6aa5b505f0ca | diff --git a/chia/plotting/manager.py b/chia/plotting/manager.py
--- a/chia/plotting/manager.py
+++ b/chia/plotting/manager.py
@@ -162,9 +162,10 @@ def process_file(file_path: Path) -> Dict:
if file_path.exists():
if (
file_path in self.failed_to_open_filenames
- and (time.time() - self.failed_to_open_filenames[file_path]) > 1200
+ and (time.time() - self.failed_to_open_filenames[file_path])
+ < self.refresh_parameter.retry_invalid_seconds
):
- # Try once every 20 minutes to open the file
+ # Try once every `refresh_parameter.retry_invalid_seconds` seconds to open the file
return new_provers
if file_path in self.plots:
try:
@@ -268,6 +269,9 @@ def process_file(file_path: Path) -> Dict:
result.loaded_plots += 1
result.loaded_size += stat_info.st_size
+ if file_path in self.failed_to_open_filenames:
+ del self.failed_to_open_filenames[file_path]
+
except Exception as e:
tb = traceback.format_exc()
log.error(f"Failed to open file {file_path}. {e} {tb}")
diff --git a/chia/plotting/util.py b/chia/plotting/util.py
--- a/chia/plotting/util.py
+++ b/chia/plotting/util.py
@@ -16,6 +16,7 @@
@dataclass
class PlotsRefreshParameter:
interval_seconds: int = 120
+ retry_invalid_seconds: int = 1200
batch_size: int = 30
batch_sleep_milliseconds: int = 10
| diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py
--- a/tests/core/test_farmer_harvester_rpc.py
+++ b/tests/core/test_farmer_harvester_rpc.py
@@ -1,6 +1,8 @@
# flake8: noqa: E501
import logging
+from pathlib import Path
from secrets import token_bytes
+from shutil import copy, move
import pytest
from blspy import AugSchemeMPL
@@ -198,6 +200,7 @@ async def test_case(
await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
result = await client_2.get_plots()
assert len(result["plots"]) == expect_total_plots
+ assert len(harvester.plot_manager.failed_to_open_filenames) == 0
# Add plot_dir with two new plots
await test_case(
@@ -291,6 +294,39 @@ async def test_case(
expect_total_plots=0,
)
+ # Test re-trying if processing a plot failed
+ # First save the plot
+ retry_test_plot = Path(plot_dir_sub / filename_2).resolve()
+ retry_test_plot_save = Path(plot_dir_sub / "save").resolve()
+ copy(retry_test_plot, retry_test_plot_save)
+ # Invalidate the plot
+ with open(plot_dir_sub / filename_2, "r+b") as file:
+ file.write(bytes(100))
+ # Add it and validate it fails to load
+ await harvester.add_plot_directory(str(plot_dir_sub))
+ expected_result.loaded_plots = 0
+ expected_result.removed_plots = 0
+ expected_result.processed_files = 1
+ expected_result.remaining_files = 0
+ harvester.plot_manager.start_refreshing()
+ await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
+ assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
+ # Make sure the file stays in `failed_to_open_filenames` and doesn't get loaded or processed in the next
+ # update round
+ expected_result.loaded_plots = 0
+ expected_result.processed_files = 0
+ harvester.plot_manager.trigger_refresh()
+ await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
+ assert retry_test_plot in harvester.plot_manager.failed_to_open_filenames
+ # Now decrease the re-try timeout, restore the valid plot file and make sure it properly loads now
+ harvester.plot_manager.refresh_parameter.retry_invalid_seconds = 0
+ move(retry_test_plot_save, retry_test_plot)
+ expected_result.loaded_plots = 1
+ expected_result.processed_files = 1
+ harvester.plot_manager.trigger_refresh()
+ await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False)
+ assert retry_test_plot not in harvester.plot_manager.failed_to_open_filenames
+
targets_1 = await client.get_reward_targets(False)
assert "have_pool_sk" not in targets_1
assert "have_farmer_sk" not in targets_1
| [BUG] Harvester plots_refresh_parameter not keeping track of plots
The harvester is losing track of my plots during replotting, and my plot count is going down.
I have 5156 plots. All of my drives are full, so I am currently replotting.
I have a script that, upon completion of a new NFT plot, it identifies an OG plot, deletes it, and moves the new NFT plot in its place.
I am currently creating 64 plots every 24h.
Earlier today, my harvester did indeed show that it was farming `5156` plots. I check my status a few moments ago, and it shows that I'm only farming `5129` plots.
![2021-08-29_01-35](https://user-images.githubusercontent.com/8990544/131241137-9b0aa144-d510-49d8-8e6c-fd3b0346de39.png)
(Then some time passes for the farmer to rediscover all my plots)
![2021-08-29_01-36](https://user-images.githubusercontent.com/8990544/131241230-535680eb-f101-464b-bd3b-cbe2bca1bf7f.png)
Ok, so back to `5156` after restarting the farmer. It lost track of 27 plots.
When I first discovered this bug this morning, there was a Δ of 85 plots that the harvester wasn't tracking.
> 2021-08-28T13:54:20.544 harvester chia.harvester.harvester: INFO ... Total 5071 plots
`chia stop all -d`
`chia start farmer`
> 2021-08-28T14:49:27.451 harvester chia.harvester.harvester: INFO ... Total 5156 plots
What I suspect:
My config.yaml had been with me since before launch, and I decided to compare it against https://github.com/Chia-Network/chia-blockchain/blob/main/chia/util/initial-config.yaml to see if I needed to refresh it any. There were a few things that I was missing.
I had `plot_loading_frequency_seconds: 120`, which isn't anywhere in the current initial-config.yaml, so I swapped that out with: https://github.com/Chia-Network/chia-blockchain/blob/main/chia/util/initial-config.yaml#L128-L131
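For readers without that file open, the referenced block is roughly the comment below (values taken from the `PlotsRefreshParameter` defaults visible in the patch earlier in this record; the exact layout of `initial-config.yaml` may differ), and a local install can be checked against it like this:

```bash
# Approximate shape of the referenced initial-config.yaml lines (not verbatim):
#   plots_refresh_parameter:
#     interval_seconds: 120
#     batch_size: 30
#     batch_sleep_milliseconds: 10
# Compare with the local configuration (default location):
grep -A 4 "plots_refresh_parameter" ~/.chia/mainnet/config/config.yaml
```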
I think that this may not be discovering new plots for some reason. I'm going to attempt to remove those lines, and go back to `plot_loading_frequency_seconds: 120`. I'll check back in the morning to see if the count is off.
Chia: 1.2.5
Distro: Arch Linux [x86_64]
Kernel: Linux 5.13.12-arch1-1
Processor: 24 x Intel(R) Xeon(R) CPU E5-2697 v2 @ 2.70GHz
Processor: 24 x Intel(R) Xeon(R) CPU E5-2697 v2 @ 2.70GHz
RAM: 64362 MiB
| We are having issues as well with the harvesters not counting all the plots since upgrading to 1.2.5. We will try to use a fresh config file and see if we can get those missing plots back. Our configs are very old as well.
Changing the config back to `plot_loading_frequency_seconds: 120` did not seem to do anything.
This may be due to how the new manager bans badbit/failbit plots.
My final plot management scripts for plotting and replotting move plots into active harvester directories with the `.plot` extension. If a file is still being copied when the manager finds it, the manager throws:
> harvester chia.plotting.manager : ERROR Failed to open file /<path_to_plot>/plot-k32-<plotId>.plot. badbit or failbit after reading size 4 at position 107547188232
> Traceback (most recent call last):
>   File "/<path_to>/chia-blockchain/chia/plotting/manager.py", line 191, in process_file
>     prover = DiskProver(str(file_path))
> RuntimeError: badbit or failbit after reading size 4 at position 107547188232
See here:
https://github.com/Chia-Network/chia-blockchain/blob/53149f2e57743e203aba1d25a89786edcf1f78ab/chia/plotting/manager.py#L198-#L199
I don't know if this outright bans it, or puts it in a loop to try to read it again in 20m. The behavior I'm seeing makes me think it's the former.
For now, I've simply modified my re-plot script (https://github.com/wallentx/farm-and-ranch-supply-depot/blob/main/bin/final-plot-management/replot/replot#L83) to move the plot into the active harvester directory with "-TMP" appended, and removing that when it's finished being copied.
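A minimal shell sketch of that rename trick (illustrative only; the author's actual script is at the linked URL, and the paths below are placeholders):

```bash
# Copy under a name the harvester will not treat as a plot, then rename when done.
SRC="/mnt/plotting/plot-k32-example.plot"   # placeholder path
DEST="/mnt/harvest/disk01"                  # placeholder path
cp "$SRC" "$DEST/$(basename "$SRC")-TMP"    # harvester ignores names not ending in .plot
mv "$DEST/$(basename "$SRC")-TMP" "$DEST/$(basename "$SRC")"   # rename is near-instant on the same filesystem
```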
I'll follow up if this solves it
Yep, that solved it. It seems like the ban on badbit/failbit lasts until a restart of the farmer?
Yes, agreed. I am replotting as well (from a different machine) and then copying the files over from external drives. Previously, while the copy was in progress the plots might get flagged as bad, but the harvester would eventually farm them shortly after they finished copying. Now, once they get flagged as bad, it does not appear to ever attempt to farm them again. Closing and re-opening caused it to refresh all the plots, and the ones that had been flagged as bad were no longer flagged and started farming as normal.
Quite annoying when copying files all day (and all night) to now have to restart the client every once in a while to make sure they're being farmed. Wish I had stuck with the older version...
> Yep, that solved it. It seems like the ban on badbit/failbit lasts until a restart of the farmer?
restarting the harvester alone appears to be sufficient.
`chia start harvester -r`
On Linux, use rsync to copy plots if you don't want to script changing file names. rsync creates a temporary file while transferring and renames it to the final file name after transfer is complete. | 2021-08-30T19:44:46Z | [] | [] |
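A typical invocation of the rsync approach described above (source and destination paths are placeholders):

```bash
# rsync copies into a hidden temporary file and only renames it to the final
# .plot name once the transfer completes, so the harvester never sees a partial plot.
rsync -av --progress /mnt/staging/*.plot /mnt/harvest/disk01/
```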
Chia-Network/chia-blockchain | 10,609 | Chia-Network__chia-blockchain-10609 | [
"10606"
] | 6ff6fbe7de58748cbfa91eb7578dc027684ee6bf | diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py
--- a/chia/cmds/plotnft_funcs.py
+++ b/chia/cmds/plotnft_funcs.py
@@ -279,7 +279,8 @@ async def submit_tx_with_confirmation(
if user_input.lower() == "y" or user_input.lower() == "yes":
try:
- tx_record: TransactionRecord = await func()
+ result: Dict = await func()
+ tx_record: TransactionRecord = result["transaction"]
start = time.time()
while time.time() - start < 10:
await asyncio.sleep(0.1)
diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py
--- a/chia/rpc/wallet_rpc_client.py
+++ b/chia/rpc/wallet_rpc_client.py
@@ -12,6 +12,13 @@
from chia.wallet.transaction_sorting import SortKey
+def parse_result_transactions(result: Dict[str, Any]) -> Dict[str, Any]:
+ result["transaction"] = TransactionRecord.from_json_dict(result["transaction"])
+ if result["fee_transaction"]:
+ result["fee_transaction"] = TransactionRecord.from_json_dict(result["fee_transaction"])
+ return result
+
+
class WalletRpcClient(RpcClient):
"""
Client to Chia RPC, connects to a local wallet. Uses HTTP/JSON, and converts back from
@@ -291,10 +298,10 @@ async def create_new_pool_wallet(
res = await self.fetch("create_new_wallet", request)
return TransactionRecord.from_json_dict(res["transaction"])
- async def pw_self_pool(self, wallet_id: str, fee: uint64) -> TransactionRecord:
- return TransactionRecord.from_json_dict(
- (await self.fetch("pw_self_pool", {"wallet_id": wallet_id, "fee": fee}))["transaction"]
- )
+ async def pw_self_pool(self, wallet_id: str, fee: uint64) -> Dict:
+ reply = await self.fetch("pw_self_pool", {"wallet_id": wallet_id, "fee": fee})
+ reply = parse_result_transactions(reply)
+ return reply
async def pw_join_pool(
self, wallet_id: str, target_puzzlehash: bytes32, pool_url: str, relative_lock_height: uint32, fee: uint64
@@ -308,17 +315,13 @@ async def pw_join_pool(
}
reply = await self.fetch("pw_join_pool", request)
- reply["transaction"] = TransactionRecord.from_json_dict(reply["transaction"])
- if reply["fee_transaction"]:
- reply["fee_transaction"] = TransactionRecord.from_json_dict(reply["fee_transaction"])
- return reply["transaction"]
+ reply = parse_result_transactions(reply)
+ return reply
async def pw_absorb_rewards(self, wallet_id: str, fee: uint64 = uint64(0)) -> Dict:
reply = await self.fetch("pw_absorb_rewards", {"wallet_id": wallet_id, "fee": fee})
reply["state"] = PoolWalletInfo.from_json_dict(reply["state"])
- reply["transaction"] = TransactionRecord.from_json_dict(reply["transaction"])
- if reply["fee_transaction"]:
- reply["fee_transaction"] = TransactionRecord.from_json_dict(reply["fee_transaction"])
+ reply = parse_result_transactions(reply)
return reply
async def pw_status(self, wallet_id: str) -> Tuple[PoolWalletInfo, List[TransactionRecord]]:
| diff --git a/tests/pools/test_pool_rpc.py b/tests/pools/test_pool_rpc.py
--- a/tests/pools/test_pool_rpc.py
+++ b/tests/pools/test_pool_rpc.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
from shutil import rmtree
-from typing import Optional, List, Dict
+from typing import Any, Optional, List, Dict
import pytest
import pytest_asyncio
@@ -802,20 +802,24 @@ async def test_self_pooling_to_pooling(self, setup, fee, trusted):
assert status.target is None
assert status_2.target is None
- join_pool_tx: TransactionRecord = await client.pw_join_pool(
- wallet_id,
- pool_ph,
- "https://pool.example.com",
- 10,
- fee,
- )
- join_pool_tx_2: TransactionRecord = await client.pw_join_pool(
- wallet_id_2,
- pool_ph,
- "https://pool.example.com",
- 10,
- fee,
- )
+ join_pool_tx: TransactionRecord = (
+ await client.pw_join_pool(
+ wallet_id,
+ pool_ph,
+ "https://pool.example.com",
+ 10,
+ fee,
+ )
+ )["transaction"]
+ join_pool_tx_2: TransactionRecord = (
+ await client.pw_join_pool(
+ wallet_id_2,
+ pool_ph,
+ "https://pool.example.com",
+ 10,
+ fee,
+ )
+ )["transaction"]
assert join_pool_tx is not None
assert join_pool_tx_2 is not None
@@ -919,13 +923,15 @@ async def have_chia():
assert status.current.state == PoolSingletonState.SELF_POOLING.value
assert status.target is None
- join_pool_tx: TransactionRecord = await client.pw_join_pool(
- wallet_id,
- pool_ph,
- "https://pool.example.com",
- 5,
- fee,
- )
+ join_pool_tx: TransactionRecord = (
+ await client.pw_join_pool(
+ wallet_id,
+ pool_ph,
+ "https://pool.example.com",
+ 5,
+ fee,
+ )
+ )["transaction"]
assert join_pool_tx is not None
status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]
@@ -953,9 +959,9 @@ async def status_is_farming_to_pool():
status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]
- leave_pool_tx: TransactionRecord = await client.pw_self_pool(wallet_id, fee)
- assert leave_pool_tx.wallet_id == wallet_id
- assert leave_pool_tx.amount == 1
+ leave_pool_tx: Dict[str, Any] = await client.pw_self_pool(wallet_id, fee)
+ assert leave_pool_tx["transaction"].wallet_id == wallet_id
+ assert leave_pool_tx["transaction"].amount == 1
async def status_is_leaving():
await self.farm_blocks(full_node_api, our_ph, 1)
@@ -1054,13 +1060,15 @@ async def status_is_farming_to_pool():
assert pw_info.current.relative_lock_height == 5
status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]
- join_pool_tx: TransactionRecord = await client.pw_join_pool(
- wallet_id,
- pool_b_ph,
- "https://pool-b.org",
- 10,
- fee,
- )
+ join_pool_tx: TransactionRecord = (
+ await client.pw_join_pool(
+ wallet_id,
+ pool_b_ph,
+ "https://pool-b.org",
+ 10,
+ fee,
+ )
+ )["transaction"]
assert join_pool_tx is not None
async def status_is_leaving():
@@ -1156,13 +1164,15 @@ async def status_is_farming_to_pool():
assert pw_info.current.pool_url == "https://pool-a.org"
assert pw_info.current.relative_lock_height == 5
- join_pool_tx: TransactionRecord = await client.pw_join_pool(
- wallet_id,
- pool_b_ph,
- "https://pool-b.org",
- 10,
- fee,
- )
+ join_pool_tx: TransactionRecord = (
+ await client.pw_join_pool(
+ wallet_id,
+ pool_b_ph,
+ "https://pool-b.org",
+ 10,
+ fee,
+ )
+ )["transaction"]
assert join_pool_tx is not None
await time_out_assert(
10,
| [Bug] CLI error returned when running `chia plotnft claim`
### What happened?
```
chia plotnft claim -i 2
Will claim rewards for wallet ID: 2.
Error performing operation on Plot NFT -f xxxxxxxxxx wallet id: 2: 'dict' object has no attribute 'name'
```
The claim transaction does go through - but the CLI is reporting an error.
### Version
1.3
### What platform are you using?
Linux
### What ui mode are you using?
CLI
### Relevant log output
_No response_
| 2022-03-08T23:38:36Z | [] | [] |
|
Chia-Network/chia-blockchain | 11,259 | Chia-Network__chia-blockchain-11259 | [
"11036"
] | 527b30e917247ab6eb747a8de12a6ced9ec6d621 | diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py
--- a/chia/farmer/farmer.py
+++ b/chia/farmer/farmer.py
@@ -4,13 +4,12 @@
import time
import traceback
from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional, Tuple
+from typing import Any, Callable, Dict, List, Optional, Set, Tuple
import aiohttp
from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
import chia.server.ws_connection as ws # lgtm [py/import-and-import-from]
-from chia.consensus.coinbase import create_puzzlehash_for_pk
from chia.consensus.constants import ConsensusConstants
from chia.daemon.keychain_proxy import (
KeychainProxy,
@@ -51,7 +50,7 @@
find_owner_sk,
master_sk_to_farmer_sk,
master_sk_to_pool_sk,
- master_sk_to_wallet_sk,
+ match_address_to_sk,
)
from chia.wallet.puzzles.singleton_top_layer import SINGLETON_MOD
@@ -503,7 +502,7 @@ async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Option
farmer_info, error_code = await update_pool_farmer_info()
if error_code == PoolErrorCode.FARMER_NOT_KNOWN:
# Make the farmer known on the pool with a POST /farmer
- owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk(
+ owner_sk_and_index: Optional[Tuple[PrivateKey, uint32]] = find_owner_sk(
self.all_root_sks, pool_config.owner_public_key
)
assert owner_sk_and_index is not None
@@ -527,7 +526,7 @@ async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Option
and pool_config.payout_instructions.lower() != farmer_info.payout_instructions.lower()
)
if payout_instructions_update_required or error_code == PoolErrorCode.INVALID_SIGNATURE:
- owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk(
+ owner_sk_and_index: Optional[Tuple[PrivateKey, uint32]] = find_owner_sk(
self.all_root_sks, pool_config.owner_public_key
)
assert owner_sk_and_index is not None
@@ -550,25 +549,30 @@ def get_public_keys(self):
def get_private_keys(self):
return self._private_keys
- async def get_reward_targets(self, search_for_private_key: bool) -> Dict:
+ async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict:
if search_for_private_key:
all_sks = await self.get_all_private_keys()
- stop_searching_for_farmer, stop_searching_for_pool = False, False
- for i in range(500):
- if stop_searching_for_farmer and stop_searching_for_pool and i > 0:
+ have_farmer_sk, have_pool_sk = False, False
+ search_addresses: List[bytes32] = [self.farmer_target, self.pool_target]
+ for sk, _ in all_sks:
+ found_addresses: Set[bytes32] = match_address_to_sk(sk, search_addresses, max_ph_to_search)
+
+ if not have_farmer_sk and self.farmer_target in found_addresses:
+ search_addresses.remove(self.farmer_target)
+ have_farmer_sk = True
+
+ if not have_pool_sk and self.pool_target in found_addresses:
+ search_addresses.remove(self.pool_target)
+ have_pool_sk = True
+
+ if have_farmer_sk and have_pool_sk:
break
- for sk, _ in all_sks:
- ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1())
- if ph == self.farmer_target:
- stop_searching_for_farmer = True
- if ph == self.pool_target:
- stop_searching_for_pool = True
return {
"farmer_target": self.farmer_target_encoded,
"pool_target": self.pool_target_encoded,
- "have_farmer_sk": stop_searching_for_farmer,
- "have_pool_sk": stop_searching_for_pool,
+ "have_farmer_sk": have_farmer_sk,
+ "have_pool_sk": have_pool_sk,
}
return {
"farmer_target": self.farmer_target_encoded,
diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py
--- a/chia/rpc/farmer_rpc_api.py
+++ b/chia/rpc/farmer_rpc_api.py
@@ -97,7 +97,8 @@ async def get_signage_points(self, _: Dict) -> Dict[str, Any]:
async def get_reward_targets(self, request: Dict) -> Dict:
search_for_private_key = request["search_for_private_key"]
- return await self.service.get_reward_targets(search_for_private_key)
+ max_ph_to_search = request.get("max_ph_to_search", 500)
+ return await self.service.get_reward_targets(search_for_private_key, max_ph_to_search)
async def set_reward_targets(self, request: Dict) -> Dict:
farmer_target, pool_target = None, None
diff --git a/chia/rpc/farmer_rpc_client.py b/chia/rpc/farmer_rpc_client.py
--- a/chia/rpc/farmer_rpc_client.py
+++ b/chia/rpc/farmer_rpc_client.py
@@ -22,8 +22,11 @@ async def get_signage_point(self, sp_hash: bytes32) -> Optional[Dict]:
async def get_signage_points(self) -> List[Dict]:
return (await self.fetch("get_signage_points", {}))["signage_points"]
- async def get_reward_targets(self, search_for_private_key: bool) -> Dict:
- response = await self.fetch("get_reward_targets", {"search_for_private_key": search_for_private_key})
+ async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict:
+ response = await self.fetch(
+ "get_reward_targets",
+ {"search_for_private_key": search_for_private_key, "max_ph_to_search": max_ph_to_search},
+ )
return_dict = {
"farmer_target": response["farmer_target"],
"pool_target": response["pool_target"],
diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py
--- a/chia/rpc/wallet_rpc_api.py
+++ b/chia/rpc/wallet_rpc_api.py
@@ -24,10 +24,15 @@
from chia.util.ws_message import WsRpcMessage, create_payload_dict
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_wallet import CATWallet
-from chia.wallet.derive_keys import master_sk_to_singleton_owner_sk, master_sk_to_wallet_sk_unhardened, MAX_POOL_WALLETS
-from chia.wallet.rl_wallet.rl_wallet import RLWallet
-from chia.wallet.derive_keys import master_sk_to_farmer_sk, master_sk_to_pool_sk, master_sk_to_wallet_sk
+from chia.wallet.derive_keys import (
+ MAX_POOL_WALLETS,
+ master_sk_to_farmer_sk,
+ master_sk_to_pool_sk,
+ master_sk_to_singleton_owner_sk,
+ match_address_to_sk,
+)
from chia.wallet.did_wallet.did_wallet import DIDWallet
+from chia.wallet.rl_wallet.rl_wallet import RLWallet
from chia.wallet.trade_record import TradeRecord
from chia.wallet.trading.offer import Offer
from chia.wallet.transaction_record import TransactionRecord
@@ -36,7 +41,6 @@
from chia.wallet.wallet_info import WalletInfo
from chia.wallet.wallet_node import WalletNode
from chia.util.config import load_config
-from chia.consensus.coinbase import create_puzzlehash_for_pk
# Timeout for response from wallet/full node for sending a transaction
TIMEOUT = 30
@@ -302,25 +306,12 @@ async def _check_key_used_for_rewards(
config: Dict = load_config(new_root, "config.yaml")
farmer_target = config["farmer"].get("xch_target_address")
pool_target = config["pool"].get("xch_target_address")
- found_farmer = False
- found_pool = False
- selected = config["selected_network"]
- prefix = config["network_overrides"]["config"][selected]["address_prefix"]
- for i in range(max_ph_to_search):
- if found_farmer and found_pool:
- break
-
- phs = [
- encode_puzzle_hash(create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()), prefix),
- encode_puzzle_hash(
- create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(i)).get_g1()), prefix
- ),
- ]
- for ph in phs:
- if ph == farmer_target:
- found_farmer = True
- if ph == pool_target:
- found_pool = True
+ address_to_check: List[bytes32] = [decode_puzzle_hash(farmer_target), decode_puzzle_hash(pool_target)]
+
+ found_addresses: Set[bytes32] = match_address_to_sk(sk, address_to_check, max_ph_to_search)
+
+ found_farmer = address_to_check[0] in found_addresses
+ found_pool = address_to_check[1] in found_addresses
return found_farmer, found_pool
@@ -334,9 +325,12 @@ async def check_delete_key(self, request):
walletBalance: bool = False
fingerprint = request["fingerprint"]
+ max_ph_to_search = request.get("max_ph_to_search", 100)
sk, _ = await self._get_private_key(fingerprint)
if sk is not None:
- used_for_farmer, used_for_pool = await self._check_key_used_for_rewards(self.service.root_path, sk, 100)
+ used_for_farmer, used_for_pool = await self._check_key_used_for_rewards(
+ self.service.root_path, sk, max_ph_to_search
+ )
if self.service.logged_in_fingerprint != fingerprint:
await self._stop_wallet()
diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py
--- a/chia/rpc/wallet_rpc_client.py
+++ b/chia/rpc/wallet_rpc_client.py
@@ -58,8 +58,8 @@ async def add_key(self, mnemonic: List[str], request_type: str = "new_wallet") -
async def delete_key(self, fingerprint: int) -> None:
return await self.fetch("delete_key", {"fingerprint": fingerprint})
- async def check_delete_key(self, fingerprint: int) -> None:
- return await self.fetch("check_delete_key", {"fingerprint": fingerprint})
+ async def check_delete_key(self, fingerprint: int, max_ph_to_search: int = 100) -> None:
+ return await self.fetch("check_delete_key", {"fingerprint": fingerprint, "max_ph_to_search": max_ph_to_search})
async def delete_all_keys(self) -> None:
return await self.fetch("delete_all_keys", {})
diff --git a/chia/wallet/derive_keys.py b/chia/wallet/derive_keys.py
--- a/chia/wallet/derive_keys.py
+++ b/chia/wallet/derive_keys.py
@@ -1,7 +1,9 @@
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple, Set
from blspy import AugSchemeMPL, PrivateKey, G1Element
+from chia.consensus.coinbase import create_puzzlehash_for_pk
+from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32
# EIP 2334 bls key derivation
@@ -76,7 +78,7 @@ def master_sk_to_pooling_authentication_sk(master: PrivateKey, pool_wallet_index
return _derive_path(master, [12381, 8444, 6, pool_wallet_index * 10000 + index])
-def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[G1Element, uint32]]:
+def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[PrivateKey, uint32]]:
for pool_wallet_index in range(MAX_POOL_WALLETS):
for sk in all_sks:
try_owner_sk = master_sk_to_singleton_owner_sk(sk, uint32(pool_wallet_index))
@@ -95,3 +97,34 @@ def find_authentication_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Op
# NOTE: ONLY use 0 for authentication key index to ensure compatibility
return master_sk_to_pooling_authentication_sk(sk, uint32(pool_wallet_index), uint32(0))
return None
+
+
+def match_address_to_sk(
+ sk: PrivateKey, addresses_to_search: List[bytes32], max_ph_to_search: int = 500
+) -> Set[bytes32]:
+ """
+ Checks the list of given address is a derivation of the given sk within the given number of derivations
+ Returns a Set of the addresses that are derivations of the given sk
+ """
+ if sk is None or not addresses_to_search:
+ return set()
+
+ found_addresses: Set[bytes32] = set()
+ search_list: Set[bytes32] = set(addresses_to_search)
+
+ for i in range(max_ph_to_search):
+
+ phs = [
+ create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()),
+ create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(i)).get_g1()),
+ ]
+
+ for address in search_list:
+ if address in phs:
+ found_addresses.add(address)
+
+ search_list = search_list - found_addresses
+ if not len(search_list):
+ return found_addresses
+
+ return found_addresses
| diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py
--- a/tests/core/test_farmer_harvester_rpc.py
+++ b/tests/core/test_farmer_harvester_rpc.py
@@ -17,7 +17,7 @@
from chia.util.config import load_config, lock_and_load_config, save_config
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64
-from chia.wallet.derive_keys import master_sk_to_wallet_sk
+from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened
from tests.setup_nodes import setup_harvester_farmer, test_constants
from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval
from tests.util.rpc import validate_get_routes
@@ -181,36 +181,48 @@ async def test_farmer_reward_target_endpoints(bt, harvester_farmer_environment):
targets_1 = await farmer_rpc_client.get_reward_targets(False)
assert "have_pool_sk" not in targets_1
assert "have_farmer_sk" not in targets_1
- targets_2 = await farmer_rpc_client.get_reward_targets(True)
+ targets_2 = await farmer_rpc_client.get_reward_targets(True, 2)
assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"]
- new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1())
- new_ph_2: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1())
+ new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(2)).get_g1())
+ new_ph_2: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(7)).get_g1())
await farmer_rpc_client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch"))
- targets_3 = await farmer_rpc_client.get_reward_targets(True)
+ targets_3 = await farmer_rpc_client.get_reward_targets(True, 10)
assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph
assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2
assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"]
- new_ph_3: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1())
- await farmer_rpc_client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch"))
- targets_4 = await farmer_rpc_client.get_reward_targets(True)
- assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph
- assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3
- assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"]
+ # limit the derivation search to 3 should fail to find the pool sk
+ targets_4 = await farmer_rpc_client.get_reward_targets(True, 3)
+ assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"]
+
+ # check observer addresses
+ observer_farmer: bytes32 = create_puzzlehash_for_pk(
+ master_sk_to_wallet_sk_unhardened(bt.farmer_master_sk, uint32(2)).get_g1()
+ )
+ observer_pool: bytes32 = create_puzzlehash_for_pk(
+ master_sk_to_wallet_sk_unhardened(bt.pool_master_sk, uint32(7)).get_g1()
+ )
+ await farmer_rpc_client.set_reward_targets(
+ encode_puzzle_hash(observer_farmer, "xch"), encode_puzzle_hash(observer_pool, "xch")
+ )
+ targets = await farmer_rpc_client.get_reward_targets(True, 10)
+ assert decode_puzzle_hash(targets["farmer_target"]) == observer_farmer
+ assert decode_puzzle_hash(targets["pool_target"]) == observer_pool
+ assert targets["have_pool_sk"] and targets["have_farmer_sk"]
root_path = farmer_api.farmer._root_path
config = load_config(root_path, "config.yaml")
- assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch")
- assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch")
+ assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(observer_farmer, "xch")
+ assert config["pool"]["xch_target_address"] == encode_puzzle_hash(observer_pool, "xch")
- new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch")
- added_char = new_ph_3_encoded + "a"
+ new_ph_2_encoded = encode_puzzle_hash(new_ph_2, "xch")
+ added_char = new_ph_2_encoded + "a"
with pytest.raises(ValueError):
await farmer_rpc_client.set_reward_targets(None, added_char)
- replaced_char = new_ph_3_encoded[0:-1] + "a"
+ replaced_char = new_ph_2_encoded[0:-1] + "a"
with pytest.raises(ValueError):
await farmer_rpc_client.set_reward_targets(None, replaced_char)
diff --git a/tests/wallet/rpc/test_wallet_rpc.py b/tests/wallet/rpc/test_wallet_rpc.py
--- a/tests/wallet/rpc/test_wallet_rpc.py
+++ b/tests/wallet/rpc/test_wallet_rpc.py
@@ -26,7 +26,7 @@
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_wallet import CATWallet
-from chia.wallet.derive_keys import master_sk_to_wallet_sk
+from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened
from chia.wallet.trading.trade_status import TradeStatus
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.transaction_sorting import SortKey
@@ -643,7 +643,37 @@ async def tx_in_mempool_2():
assert sk_dict["used_for_pool_rewards"] is True
# Check unknown key
- sk_dict = await client.check_delete_key(123456)
+ sk_dict = await client.check_delete_key(123456, 10)
+ assert sk_dict["fingerprint"] == 123456
+ assert sk_dict["used_for_farmer_rewards"] is False
+ assert sk_dict["used_for_pool_rewards"] is False
+
+ # Add in observer reward addresses into farmer and pool for testing delete key checks
+ # set farmer to first private key
+ sk = await wallet_node.get_key_for_fingerprint(pks[0])
+ test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
+ with lock_and_load_config(wallet_node.root_path, "config.yaml") as test_config:
+ test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+ # set pool to second private key
+ sk = await wallet_node.get_key_for_fingerprint(pks[1])
+ test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
+ test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+ save_config(wallet_node.root_path, "config.yaml", test_config)
+
+ # Check first key
+ sk_dict = await client.check_delete_key(pks[0])
+ assert sk_dict["fingerprint"] == pks[0]
+ assert sk_dict["used_for_farmer_rewards"] is True
+ assert sk_dict["used_for_pool_rewards"] is False
+
+ # Check second key
+ sk_dict = await client.check_delete_key(pks[1])
+ assert sk_dict["fingerprint"] == pks[1]
+ assert sk_dict["used_for_farmer_rewards"] is False
+ assert sk_dict["used_for_pool_rewards"] is True
+
+ # Check unknown key
+ sk_dict = await client.check_delete_key(123456, 10)
assert sk_dict["fingerprint"] == 123456
assert sk_dict["used_for_farmer_rewards"] is False
assert sk_dict["used_for_pool_rewards"] is False
| Farming rewards dialog incorrectly claims there is no private key for address
### What happened?
![image](https://user-images.githubusercontent.com/70252155/161584812-ffaae686-c8c5-4218-9914-b04a9473db95.png)
### Version
1.3.3
### What platform are you using?
Windows
### What ui mode are you using?
GUI
### Relevant log output
_No response_
| Are you sending your rewards to a cold wallet? This message is expected if so.
I have the same problem and I am sending my reward to the hot wallet!
This is a bug with observer addresses - this dialog is not checking the new observer addresses | 2022-04-21T21:47:46Z | [] | [] |
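For reference, the patch above addresses this by checking both hardened and unhardened (observer) derivations via the new `match_address_to_sk()` helper. A minimal sketch of that check — it assumes a chia-blockchain install that includes this patch, and the key below is a throwaway generated purely for illustration:

```python
from blspy import AugSchemeMPL

from chia.consensus.coinbase import create_puzzlehash_for_pk
from chia.util.ints import uint32
from chia.wallet.derive_keys import master_sk_to_wallet_sk_unhardened, match_address_to_sk

# Throwaway master key, only for illustration.
master_sk = AugSchemeMPL.key_gen(bytes([1] * 32))

# An observer (unhardened) puzzle hash at index 0 -- the kind of address the old
# hardened-only loop never derived, which is why the dialog claimed there was no key for it.
observer_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(master_sk, uint32(0)).get_g1())

# The new helper derives both hardened and unhardened puzzle hashes up to
# max_ph_to_search and returns the subset of the given addresses it matched.
found = match_address_to_sk(master_sk, [observer_ph], max_ph_to_search=50)
assert observer_ph in found
```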
Chia-Network/chia-blockchain | 13,502 | Chia-Network__chia-blockchain-13502 | [
"13084"
] | 357075235a28f77b69873c04a2dbab59020ef759 | diff --git a/chia/plotting/cache.py b/chia/plotting/cache.py
--- a/chia/plotting/cache.py
+++ b/chia/plotting/cache.py
@@ -2,6 +2,7 @@
import time
import traceback
from dataclasses import dataclass, field
+from math import ceil
from pathlib import Path
from typing import Dict, ItemsView, KeysView, List, Optional, Tuple, ValuesView
@@ -132,8 +133,10 @@ def load(self) -> None:
stored_cache: VersionedBlob = VersionedBlob.from_bytes(serialized)
if stored_cache.version == CURRENT_VERSION:
cache_data: CacheDataV1 = CacheDataV1.from_bytes(stored_cache.blob)
- self._data = {
- Path(path): CacheEntry(
+ self._data = {}
+ estimated_c2_sizes: Dict[int, int] = {}
+ for path, cache_entry in cache_data.entries:
+ new_entry = CacheEntry(
DiskProver.from_bytes(cache_entry.prover_data),
cache_entry.farmer_public_key,
cache_entry.pool_public_key,
@@ -141,8 +144,27 @@ def load(self) -> None:
cache_entry.plot_public_key,
float(cache_entry.last_use),
)
- for path, cache_entry in cache_data.entries
- }
+ # TODO, drop the below entry dropping after few versions or whenever we force a cache recreation.
+ # it's here to filter invalid cache entries coming from bladebit RAM plotting.
+ # Related: - https://github.com/Chia-Network/chia-blockchain/issues/13084
+ # - https://github.com/Chia-Network/chiapos/pull/337
+ k = new_entry.prover.get_size()
+ if k not in estimated_c2_sizes:
+ estimated_c2_sizes[k] = ceil(2 ** k / 100_000_000) * ceil(k / 8)
+ memo_size = len(new_entry.prover.get_memo())
+ prover_size = len(cache_entry.prover_data)
+ # Estimated C2 size + memo size + 2000 (static data + path)
+ # static data: version(2) + table pointers (<=96) + id(32) + k(1) => ~130
+ # path: up to ~1870, all above will lead to false positive.
+ # See https://github.com/Chia-Network/chiapos/blob/3ee062b86315823dd775453ad320b8be892c7df3/src/prover_disk.hpp#L282-L287 # noqa: E501
+ if prover_size > (estimated_c2_sizes[k] + memo_size + 2000):
+ log.warning(
+ "Suspicious cache entry dropped. Recommended: stop the harvester, remove "
+ f"{self._path}, restart. Entry: size {prover_size}, path {path}"
+ )
+ else:
+ self._data[Path(path)] = new_entry
+
else:
raise ValueError(f"Invalid cache version {stored_cache.version}. Expected version {CURRENT_VERSION}.")
except FileNotFoundError:
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@
"blspy==1.0.15", # Signature library
"chiavdf==1.0.6", # timelord and vdf verification
"chiabip158==1.1", # bip158-style wallet filters
- "chiapos==1.0.10", # proof of space
+ "chiapos==1.0.11", # proof of space
"clvm==0.9.7",
"clvm_tools==0.4.5", # Currying, Program.to, other conveniences
"chia_rs==0.1.10",
| diff --git a/tests/plotting/test_plot_manager.py b/tests/plotting/test_plot_manager.py
--- a/tests/plotting/test_plot_manager.py
+++ b/tests/plotting/test_plot_manager.py
@@ -1,4 +1,5 @@
import logging
+import sys
import time
from dataclasses import dataclass, replace
from os import unlink
@@ -9,6 +10,7 @@
import pytest
from blspy import G1Element
+from chia.plotting.cache import CURRENT_VERSION, CacheDataV1
from chia.plotting.manager import Cache, PlotManager
from chia.plotting.util import (
PlotInfo,
@@ -22,7 +24,8 @@
from chia.simulator.block_tools import get_plot_dir
from chia.simulator.time_out_assert import time_out_assert
from chia.util.config import create_default_chia_config, lock_and_load_config, save_config
-from chia.util.ints import uint32
+from chia.util.ints import uint16, uint32
+from chia.util.misc import VersionedBlob
from tests.plotting.util import get_test_plots
log = logging.getLogger(__name__)
@@ -520,6 +523,91 @@ async def test_plot_info_caching(environment, bt):
plot_manager.stop_refreshing()
+@pytest.mark.asyncio
+async def test_drop_too_large_cache_entries(environment, bt):
+ env: Environment = environment
+ expected_result = PlotRefreshResult(loaded=env.dir_1.plot_info_list(), processed=len(env.dir_1))
+ add_plot_directory(env.root_path, str(env.dir_1.path))
+ await env.refresh_tester.run(expected_result)
+ assert env.refresh_tester.plot_manager.cache.path().exists()
+ assert len(env.dir_1) >= 6, "This test requires at least 6 cache entries"
+ # Load the cache entries
+ cache_path = env.refresh_tester.plot_manager.cache.path()
+ serialized = cache_path.read_bytes()
+ stored_cache: VersionedBlob = VersionedBlob.from_bytes(serialized)
+ cache_data: CacheDataV1 = CacheDataV1.from_bytes(stored_cache.blob)
+
+ def modify_cache_entry(index: int, additional_data: int, modify_memo: bool) -> str:
+ path, cache_entry = cache_data.entries[index]
+ prover_data = cache_entry.prover_data
+ # Size of length hints in chiapos serialization currently depends on the platform
+ size_length = 8 if sys.maxsize > 2 ** 32 else 4
+ # Version
+ version_size = 2
+ version = prover_data[0:version_size]
+ # Filename
+ filename_offset = version_size + size_length
+ filename_length = int.from_bytes(prover_data[version_size:filename_offset], byteorder=sys.byteorder)
+ filename = prover_data[filename_offset : filename_offset + filename_length]
+ # Memo
+ memo_length_offset = filename_offset + filename_length
+ memo_length = int.from_bytes(
+ prover_data[memo_length_offset : memo_length_offset + size_length], byteorder=sys.byteorder
+ )
+ memo_offset = memo_length_offset + size_length
+ memo = prover_data[memo_offset : memo_offset + memo_length]
+ # id, k, table pointers, C2
+ remainder = prover_data[memo_offset + memo_length :]
+
+ # Add the additional data to the filename
+ filename_length += additional_data
+ filename += bytes(b"\a" * additional_data)
+
+ # Add the additional data to the memo if requested
+ if modify_memo:
+ memo_length += additional_data
+ memo += bytes(b"\b" * additional_data)
+
+ filename_length_bytes = filename_length.to_bytes(size_length, byteorder=sys.byteorder)
+ memo_length_bytes = memo_length.to_bytes(size_length, byteorder=sys.byteorder)
+
+ cache_data.entries[index] = (
+ path,
+ replace(
+ cache_entry,
+ prover_data=bytes(version + filename_length_bytes + filename + memo_length_bytes + memo + remainder),
+ ),
+ )
+ return path
+
+ def assert_cache(expected: List[MockPlotInfo]) -> None:
+ test_cache = Cache(cache_path)
+ assert len(test_cache) == 0
+ test_cache.load()
+ assert len(test_cache) == len(expected)
+ for plot_info in expected:
+ assert test_cache.get(Path(plot_info.prover.get_filename())) is not None
+
+ # Modify two entries, with and without memo modification, they both should remain in the cache after load
+ modify_cache_entry(0, 1500, modify_memo=False)
+ modify_cache_entry(1, 1500, modify_memo=True)
+
+ invalid_entries = [
+ modify_cache_entry(2, 2000, modify_memo=False),
+ modify_cache_entry(3, 2000, modify_memo=True),
+ modify_cache_entry(4, 50000, modify_memo=False),
+ modify_cache_entry(5, 50000, modify_memo=True),
+ ]
+
+ plot_infos = env.dir_1.plot_info_list()
+ # Make sure the cache currently contains all plots from dir1
+ assert_cache(plot_infos)
+ # Write the modified cache entries to the file
+ cache_path.write_bytes(bytes(VersionedBlob(uint16(CURRENT_VERSION), bytes(cache_data))))
+ # And now test that plots in invalid_entries are not longer loaded
+ assert_cache([plot_info for plot_info in plot_infos if plot_info.prover.get_filename() not in invalid_entries])
+
+
@pytest.mark.asyncio
async def test_cache_lifetime(environment: Environment) -> None:
# Load a directory to produce a cache file
| [Bug] plots beyond ~4400 = harvester 100.0 load, cache_hit: false, plots check hangs before challenges
### What happened?
Noted that for the last few releases, chia_harvester was pegging a thread continuously while farming.
Info:
- System has >20k plots direct attached. Single harvester.
- plot_refresh_callback completes in 15 seconds and proof checks are typically 0.4-1 sec.
- Aside from chia_harvester constantly pegging its thread, all else appears to function normally.
Elaboration:
- Reinstalled chia_blockchain from scratch, only importing keys and mainnet/wallet db's. No change.
- Experimented with varying numbers of plots and noted that at below ~4400 plots, chia_harvester no longer pegs a thread (dropped to 0.0 load). Added 200 plots back and load jumped back to 100.0 indefinitely.
- Experimented with various harvester config settings (num_threads, parallel_reads, batch_size). No change.
- Noted that upon startup, and with >4400 plots, the found_plot messages from harvester transition from `cache_hit: True` to `cache_hit: False`.
- Also noted that attempting to run a `chia plots check` on any of the drives/plots with `cache_hit: False` results in an indefinite hang of that check before it issues a single challenge.
- Rewards are tracking for my total plot count (not 4400), so while the `cache_hit: False` causes high harvester CPU usage and inability to check those plots, they are still successfully farming.
Possible causes:
- This feels like high plot counts not playing nicely with plot_refresh / chia.plotting.cache, resulting in one of the harvester threads pegging indefinitely while attempting to cache some portion of plots over some maximum, and perhaps that same thread fails to respond to a plots check of those same plots?
### Version
1.5.0
### What platform are you using?
Linux
### What ui mode are you using?
CLI
### Relevant log output
_No response_
| Updated to 1.5.1 and cleared all settings, starting clean.
- chia_harvester still remains at constant 100.0 load while farming with >~4k plots.
- still see `cache_hit: false` on a large portion of plots.
- `chia plots check` of previously troublesome ranges takes a long time to start challenges (with its process pegged at 100.0 during the delay of several minutes per 1k plots in the selected range to check), but does eventually begin, and completes without error.
- confirmed with another large farmer that they too are seeing chia_harvester remain at 100.0 load while farming.
Can you try to delete `~/.chia/mainnet/cache/plot_manager.dat`, and restart the harvester? This will repopulate the plot location cache, but note that on the initial discovery of all your plots, this may take a while to complete. All following startups will use the cache and should not experience the same load.
I've tried that multiple times while troubleshooting here, including during the update to 1.5.1 where I cleared the `~/.chia/mainnet` folder (only imported blockchain and blockchain_wallet db's and re-added plot_directories to config). The subsequent startup does take a bit longer, and it recreates the cache file as expected, but all other symptoms remain identical.
Regardless of time farming, or even repeated `chia plots check` operations, affected plots (most of them) still show as `cache_hit: False` in logs and `chia_harvester` continues running at 100-110 load while farming (never below 100.0).
Re: running a `chia plots check`, even if I `-g` it to filter down to a single plot, after it finishes loading plots, if that plot showed as a `cache hit: False`, then the check operation will hang for several minutes before issuing challenges. If I filter only to plots that return `cache_hit: True` then challenges will begin immediately after the plots are loaded.
I have a similar problem. Since using the new 1.5.0 version and some 1.5 forks, the harvester CPU has gone to 100%; it used to be under 20%.
![image](https://user-images.githubusercontent.com/70252155/186970759-70e91ab7-2ae6-4450-8100-d8457983ed7c.png)
![image](https://user-images.githubusercontent.com/70252155/186970840-c8e22801-27bf-440e-90ae-1844fa0e34ac.png)
These are not the same machine. These are different machines.
![image](https://user-images.githubusercontent.com/70252155/186971289-93ba0e98-120c-4b25-ac5e-a546590a855b.png)
You can see that Chia and 2 forks have very high usage. All three are version 1.5.0; the older versions have very low usage.
![image](https://user-images.githubusercontent.com/70252155/186971754-05c0397e-b118-4f4a-b862-fea49c778e41.png)
Each of my machines has 4000+ plots.
My system automatically deletes the C:\Users\Administrator\.* folder every time it starts up, so the cache problem mentioned should not exist here.
> My system automatically deletes the C:\Users\Administrator.* folder every time it starts up, so the cache problem mentioned should not exist here.
It could still be a caching-related issue since it would create a new cache on the next startup (and the cache is then used while the harvester runs). Either way, we won't know unless we can figure out a way to tell what those pegged harvester threads are doing.
Why does the new version use so much CPU?
Same problem, harvester CPU 100%, Debian, GUI. This happens not only when the number of plots is over 4k. I have several hard drives with plots and the problem appears when attaching certain ones to the harvester. Since I mixed plotting using "Chia Proof of space v1.0", "Bladebit also an early version" and "MadMAX plotter", I have a hunch that this is due to some particular plotter, although I may be wrong.
All of my plots should be BladeBit replots (for pooling), so that shouldn't be the issue.
4078 bladebit plots, same hang when trying to check plots even with -n 5 -- waited several hours, nothing happened. Broken up into directories of ~100 plots, if relevant. 1.5.1.dev
@malventano Can you please provide some logs with `DEBUG` log level?
Also:
> still see cache_hit: false on a large portion of plots.
Not sure if i understand how you mean this. Do you see this on each refresh event for the same plots while the harvester is running or is it only after start and for random plots or whats going on there exactly?
> @malventano Can you please provide some logs with `DEBUG` log level?
I poked around DEBUG level logs but nothing obvious stood out. Which activities would you like DEBUG logs for? Harvester startup with/without plot_manager.dat present? While attempting a plots check?
> Not sure if i understand how you mean this. Do you see this on each refresh event for the same plots while the harvester is running or is it only after start and for random plots or whats going on there exactly?
I see these events in log during harvester start and when attempting to do a plots check. Example:
```
2022-09-14T17:55:08.360 chia.plotting.cache : INFO Loaded 28925466 bytes of cached data
2022-09-14T17:55:08.669 chia.plotting.check_plots : INFO event: started, loaded 0 plots, 24286 remaining
2022-09-14T17:55:08.772 chia.plotting.manager : INFO Only loading plots that contain "/mnt/e" in the file or directory name
2022-09-14T17:55:08.784 chia.plotting.check_plots : INFO event: batch_processed, loaded 0 plots, 23986 remaining
2022-09-14T17:55:08.784 chia.plotting.manager : INFO Only loading plots that contain "/mnt/e" in the file or directory name
(...)
2022-09-14T17:55:09.104 chia.plotting.check_plots : INFO event: batch_processed, loaded 0 plots, 13486 remaining
2022-09-14T17:55:09.104 chia.plotting.manager : INFO Only loading plots that contain "/mnt/e" in the file or directory name
2022-09-14T17:55:09.201 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-20-06-17-008935afe5dc515551913458dc6690be94f7b2976db134928a558722
52213646.plot of size 32, cache_hit: False
2022-09-14T17:55:09.220 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-26-19-39-592674758f32cef423d4d689d6130601ba9b4dda876addc57b6aa8d3
8851c611.plot of size 32, cache_hit: False
2022-09-14T17:55:09.225 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-26-20-51-f2ffde52daa51eaa90925aec44ad80280b4e6072549788b778d6eda3
5fda08bb.plot of size 32, cache_hit: False
2022-09-14T17:55:09.230 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-20-07-10-f0e01eb38bbbace2205dbf92e6d3bc02b06d9038c1009219363ffc65
4c472eca.plot of size 32, cache_hit: False
2022-09-14T17:55:09.247 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-26-18-28-2fdd05f2542dad6ab1e431e9f75634ac626356b397c3ed967f6920b2
30c419a6.plot of size 32, cache_hit: False
2022-09-14T17:55:09.277 chia.plotting.manager : INFO Found plot /mnt/e01/plot-k32-2021-08-26-10-24-97b3a3db56174d60a62083d39f4be3450f1e6ecfeeb131e51b4cc4a5
576c2078.plot of size 32, cache_hit: False
```
...as compared with a check against a range that was cached properly:
```
2022-09-14T17:52:46.974 chia.plotting.cache : INFO Loaded 3597591420 bytes of cached data
2022-09-14T17:52:58.113 chia.plotting.check_plots : INFO event: started, loaded 0 plots, 24286 remaining
2022-09-14T17:52:58.222 chia.plotting.manager : INFO Only loading plots that contain "/mnt/d27" in the file or directory name
2022-09-14T17:52:58.233 chia.plotting.check_plots : INFO event: batch_processed, loaded 0 plots, 23986 remaining
2022-09-14T17:52:58.234 chia.plotting.manager : INFO Only loading plots that contain "/mnt/d27" in the file or directory name
(...)
2022-09-14T17:52:58.344 chia.plotting.check_plots : INFO event: batch_processed, loaded 0 plots, 19786 remaining
2022-09-14T17:52:58.344 chia.plotting.manager : INFO Only loading plots that contain "/mnt/d27" in the file or directory name
2022-09-14T17:52:58.349 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-05-46-ef9240da867fd01e48d817f6e1ddf0517b703754863a302f67422d82
42edcfea.plot of size 32, cache_hit: True
2022-09-14T17:52:58.349 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-01-16-78a37f51777b07f9d5f0d395cc0c987e79ce0581d6b14fdf5e4ffd4f
6abf9d02.plot of size 32, cache_hit: True
2022-09-14T17:52:58.350 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-05-56-8c22aa8c3118e76d1a6e4bba3cecb961b6872601eb40b1e0bd4f938d
ead70072.plot of size 32, cache_hit: True
2022-09-14T17:52:58.350 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-06-16-0cdab955ad3f372d27dd261f47c38f33e6a473e63c4b12c164b367d1
99bc4d59.plot of size 32, cache_hit: True
2022-09-14T17:52:58.350 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-06-26-647c3e28d82478748889b0484455f004cc04f97d52ed776156e4197c
61c17688.plot of size 32, cache_hit: True
2022-09-14T17:52:58.350 chia.plotting.manager : INFO Found plot /mnt/d27/plot-k32-2021-08-08-06-26-978e931b10552fba6285214ae7de3142e9f43d1551e27cbd1b78d171
145df96d.plot of size 32, cache_hit: True
```
They do not repeat during refresh events, but I did note these other errors appearing in INFO level logs at regular intervals - could be related:
```
2022-09-14T09:16:30.118 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
2022-09-14T09:18:32.225 harvester chia.harvester.harvester: INFO _plot_refresh_callback: event done, loaded 0, removed 0, processed 24286, remaining 0, duration: 1.02 seconds, total plots: 24286
2022-09-14T10:58:45.556 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
2022-09-14T11:00:47.660 harvester chia.harvester.harvester: INFO _plot_refresh_callback: event done, loaded 0, removed 0, processed 24286, remaining 0, duration: 0.88 seconds, total plots: 24286
2022-09-14T12:42:38.391 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
2022-09-14T12:44:40.528 harvester chia.harvester.harvester: INFO _plot_refresh_callback: event done, loaded 0, removed 0, processed 24286, remaining 0, duration: 0.99 seconds, total plots: 24286
2022-09-14T14:25:30.126 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
2022-09-14T14:27:32.312 harvester chia.harvester.harvester: INFO _plot_refresh_callback: event done, loaded 0, removed 0, processed 24286, remaining 0, duration: 0.98 seconds, total plots: 24286
```
```
2022-09-14T09:16:30.118 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
```
Can you also post the traceback which comes after this line ^
> Can you also post the traceback which comes after this line ^
Oh, sorry, I was tailing into a grep for harvester for that. Here's the most recent one in my log:
```
2022-09-14T17:55:46.871 harvester chia.plotting.cache : ERROR Failed to save cache: Value 5794656522 does not fit into uint32, Traceback (most recent call last):
File "/root/chia-blockchain/chia/plotting/cache.py", line 121, in save
serialized: bytes = bytes(disk_cache)
File "/root/chia-blockchain/chia/util/streamable.py", line 616, in __bytes__
self.stream(f)
File "/root/chia-blockchain/chia/util/streamable.py", line 602, in stream
field.stream_function(getattr(self, field.name), f)
File "/root/chia-blockchain/chia/util/streamable.py", line 434, in stream_bytes
write_uint32(f, uint32(len(item)))
File "/root/chia-blockchain/chia/util/struct_stream.py", line 67, in __init__
raise ValueError(f"Value {self} does not fit into {type(self).__name__}")
ValueError: Value 5794656522 does not fit into uint32
```
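(As an aside, the failing value in that traceback is simply larger than the 4-byte length prefix the cache serialization writes, so every save attempt fails the same way. A quick check of the arithmetic in plain Python:)

```python
# The streamable format writes the cache blob length via write_uint32, i.e. max 2**32 - 1.
cache_size = 5_794_656_522
print(2**32 - 1)               # 4294967295
print(cache_size > 2**32 - 1)  # True -> the length prefix overflows on every save
```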
Thanks, this helps 👍
> Thanks, this helps 👍
Of course. Just remember I'm not sure if that error is related to the CPU use issue, but here's hoping! :)
Can you post the full debug.log either here or send it to me or on keybase http://keybase.io/dustinface?
Okay so.. it turned out that the reason for all this is plots created via the bladebit RAM plotter, where the `DiskProver` serializes into 524,659 bytes, which:
- Obviously takes a very long time based on the number of those plots
- Lets the cache grow like crazy, so that we end up with a number of bytes which doesn't fit into `uint32` -> `Value 5794656522 does not fit into uint32` while we serialize the length of the bytes.
- Leads to the refresh thread constantly working on the serialization; as soon as it's done it fails to write for the reason above, and then in the next refresh event it tries the same again. This seems to be the reason for the 100% peg.
The reason why the `DiskProver` serializes into such a huge blob is that those plots seem to have 65,536 `C2` entries.
Table pointers from a plot in question with `table_begin_pointers[10] - table_begin_pointers[9]` -> 262,144:
```
table_begin_pointers = {std::vector<unsigned long long>} size=11
[0] = {unsigned long long} 0
[1] = {unsigned long long} 262144
[2] = {unsigned long long} 14839185408
[3] = {unsigned long long} 28822208512
[4] = {unsigned long long} 42911924224
[5] = {unsigned long long} 57272958976
[6] = {unsigned long long} 72367734784
[7] = {unsigned long long} 89824165888
[8] = {unsigned long long} 107538284544
[9] = {unsigned long long} 107540119552
[10] = {unsigned long long} 107540381696
```
Table pointers from a normally working plot with `table_begin_pointers[10] - table_begin_pointers[9]` -> 176:
```
table_begin_pointers = {std::vector<unsigned long long>} size=11
[0] = {unsigned long long} 0
[1] = {unsigned long long} 252
[2] = {unsigned long long} 14839436976
[3] = {unsigned long long} 28822365051
[4] = {unsigned long long} 42911861451
[5] = {unsigned long long} 57273202401
[6] = {unsigned long long} 72368924901
[7] = {unsigned long long} 89827257426
[8] = {unsigned long long} 107543532882
[9] = {unsigned long long} 107545250830
[10] = {unsigned long long} 107545251006
```
I'm going to talk with @harold-b about this and will post an update once we've figured this out.
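(The cache-entry filter in the patch above encodes exactly this observation: a healthy k32 prover serializes to well under the threshold, while the broken bladebit RAM plots drag along a ~256 KiB C2 region. A rough worked example of the heuristic, mirroring the patched `load()` code; the memo and prover sizes below are assumed values for illustration:)

```python
from math import ceil

k = 32
memo_size = 128           # typical memo length; an assumption for illustration
healthy_prover = 600      # ballpark size of a normal k32 DiskProver (176-byte C2 region, short path)
broken_prover = 524_659   # the bladebit RAM plot size reported above

# Same estimate as the patch: ceil(2**k / 100_000_000) * ceil(k / 8) -> 43 * 4 = 172
estimated_c2 = ceil(2**k / 100_000_000) * ceil(k / 8)
threshold = estimated_c2 + memo_size + 2000  # plus static data and path headroom

print(healthy_prover > threshold)  # False -> entry kept
print(broken_prover > threshold)   # True  -> entry dropped with a warning
```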
| 2022-09-21T22:57:15Z | [] | [] |
Chia-Network/chia-blockchain | 14,997 | Chia-Network__chia-blockchain-14997 | [
"7648"
] | 4b594637c05268a164ecbf6b3536a67930291fa6 | diff --git a/chia/server/reconnect_task.py b/chia/server/reconnect_task.py
deleted file mode 100644
--- a/chia/server/reconnect_task.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-from logging import Logger
-
-from chia.server.server import ChiaServer
-from chia.types.peer_info import PeerInfo
-
-
-def start_reconnect_task(server: ChiaServer, peer_info: PeerInfo, log: Logger) -> asyncio.Task[None]:
- """
- Start a background task that checks connection and reconnects periodically to a peer.
- """
-
- async def connection_check() -> None:
- while True:
- peer_retry = True
- for _, connection in server.all_connections.items():
- if connection.get_peer_info() == peer_info:
- peer_retry = False
- if peer_retry:
- log.info(f"Reconnecting to peer {peer_info}")
- try:
- await server.start_client(peer_info, None)
- except Exception as e:
- log.info(f"Failed to connect to {peer_info} {e}")
- await asyncio.sleep(3)
-
- return asyncio.create_task(connection_check())
diff --git a/chia/server/start_farmer.py b/chia/server/start_farmer.py
--- a/chia/server/start_farmer.py
+++ b/chia/server/start_farmer.py
@@ -11,12 +11,11 @@
from chia.rpc.farmer_rpc_api import FarmerRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.keychain import Keychain
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -34,12 +33,8 @@ def create_farmer_service(
) -> Service[Farmer]:
service_config = config[SERVICE_NAME]
- connect_peers = []
fnp = service_config.get("full_node_peer")
- if fnp is not None:
- connect_peers.append(
- PeerInfo(str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"])
- )
+ connect_peers = set() if fnp is None else {UnresolvedPeerInfo(fnp["host"], fnp["port"])}
overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
updated_constants = consensus_constants.replace_str_to_bytes(**overrides)
diff --git a/chia/server/start_harvester.py b/chia/server/start_harvester.py
--- a/chia/server/start_harvester.py
+++ b/chia/server/start_harvester.py
@@ -11,11 +11,10 @@
from chia.rpc.harvester_rpc_api import HarvesterRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -27,7 +26,7 @@ def create_harvester_service(
root_path: pathlib.Path,
config: Dict[str, Any],
consensus_constants: ConsensusConstants,
- farmer_peer: Optional[PeerInfo],
+ farmer_peer: Optional[UnresolvedPeerInfo],
connect_to_daemon: bool = True,
) -> Service[Harvester]:
service_config = config[SERVICE_NAME]
@@ -49,7 +48,7 @@ def create_harvester_service(
node_type=NodeType.HARVESTER,
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
- connect_peers=[] if farmer_peer is None else [farmer_peer],
+ connect_peers=set() if farmer_peer is None else {farmer_peer},
network_id=network_id,
rpc_info=rpc_info,
connect_to_daemon=connect_to_daemon,
@@ -63,9 +62,7 @@ async def async_main() -> int:
service_config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
config[SERVICE_NAME] = service_config
initialize_service_logging(service_name=SERVICE_NAME, config=config)
- farmer_peer = PeerInfo(
- str(get_host_addr(service_config["farmer_peer"]["host"])), service_config["farmer_peer"]["port"]
- )
+ farmer_peer = UnresolvedPeerInfo(service_config["farmer_peer"]["host"], service_config["farmer_peer"]["port"])
service = create_harvester_service(DEFAULT_ROOT_PATH, config, DEFAULT_CONSTANTS, farmer_peer)
await service.setup_process_global_state()
await service.run()
diff --git a/chia/server/start_service.py b/chia/server/start_service.py
--- a/chia/server/start_service.py
+++ b/chia/server/start_service.py
@@ -9,7 +9,7 @@
import sys
from pathlib import Path
from types import FrameType
-from typing import Any, Awaitable, Callable, Coroutine, Dict, Generic, List, Optional, Tuple, Type, TypeVar
+from typing import Any, Awaitable, Callable, Coroutine, Dict, Generic, List, Optional, Set, Tuple, Type, TypeVar
from chia.cmds.init_funcs import chia_full_version_str
from chia.daemon.server import service_launch_lock_path
@@ -20,13 +20,13 @@
from chia.server.ssl_context import chia_ssl_ca_paths, private_ssl_ca_paths
from chia.server.upnp import UPnP
from chia.server.ws_connection import WSChiaConnection
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo
from chia.util.ints import uint16
from chia.util.lock import Lockfile, LockfileError
+from chia.util.network import get_host_addr
from chia.util.setproctitle import setproctitle
from ..protocols.shared_protocol import capabilities
-from .reconnect_task import start_reconnect_task
# this is used to detect whether we are running in the main process or not, in
# signal handlers. We need to ignore signals in the sub processes.
@@ -55,7 +55,7 @@ def __init__(
*,
config: Dict[str, Any],
upnp_ports: List[int] = [],
- connect_peers: List[PeerInfo] = [],
+ connect_peers: Set[UnresolvedPeerInfo] = set(),
on_connect_callback: Optional[Callable[[WSChiaConnection], Awaitable[None]]] = None,
rpc_info: Optional[RpcInfo] = None,
connect_to_daemon: bool = True,
@@ -77,6 +77,7 @@ def __init__(
self._network_id: str = network_id
self.max_request_body_size = max_request_body_size
self._listen = listen
+ self.reconnect_retry_seconds: int = 3
self._log = logging.getLogger(service_name)
self._log.info(f"Starting service {self._service_name} ...")
@@ -129,9 +130,44 @@ def __init__(
self._on_connect_callback = on_connect_callback
self._advertised_port = advertised_port
- self._reconnect_tasks: Dict[PeerInfo, Optional[asyncio.Task[None]]] = {peer: None for peer in connect_peers}
+ self._connect_peers = connect_peers
+ self._connect_peers_task: Optional[asyncio.Task[None]] = None
self.upnp: UPnP = UPnP()
+ async def _connect_peers_task_handler(self) -> None:
+ resolved_peers: Dict[UnresolvedPeerInfo, PeerInfo] = {}
+ prefer_ipv6 = self.config.get("prefer_ipv6", False)
+ while True:
+ for unresolved in self._connect_peers:
+ resolved = resolved_peers.get(unresolved)
+ if resolved is None:
+ try:
+ resolved = PeerInfo(get_host_addr(unresolved.host, prefer_ipv6=prefer_ipv6), unresolved.port)
+ except Exception as e:
+ self._log.warning(f"Failed to resolve {unresolved.host}: {e}")
+ continue
+ self._log.info(f"Add resolved {resolved}")
+ resolved_peers[unresolved] = resolved
+
+ if any(connection.peer_info == resolved for connection in self._server.all_connections.values()):
+ continue
+
+ if not await self._server.start_client(resolved, None):
+ self._log.info(f"Failed to connect to {resolved}")
+ # Re-resolve to make sure the IP didn't change, this helps for example to keep dyndns hostnames
+ # up to date.
+ try:
+ resolved_new = PeerInfo(
+ get_host_addr(unresolved.host, prefer_ipv6=prefer_ipv6), unresolved.port
+ )
+ except Exception as e:
+ self._log.warning(f"Failed to resolve after connection failure {unresolved.host}: {e}")
+ continue
+ if resolved_new != resolved:
+ self._log.info(f"Host {unresolved.host} changed from {resolved} to {resolved_new}")
+ resolved_peers[unresolved] = resolved_new
+ await asyncio.sleep(self.reconnect_retry_seconds)
+
async def start(self) -> None:
# TODO: move those parameters to `__init__`
if self._did_start:
@@ -158,8 +194,7 @@ async def start(self) -> None:
)
self._advertised_port = self._server.get_port()
- for peer in self._reconnect_tasks.keys():
- self.add_peer(peer)
+ self._connect_peers_task = asyncio.create_task(self._connect_peers_task_handler())
self._log.info(
f"Started {self._service_name} service on network_id: {self._network_id} "
@@ -190,11 +225,8 @@ async def run(self) -> None:
self._log.error(f"{self._service_name}: already running")
raise ValueError(f"{self._service_name}: already running") from e
- def add_peer(self, peer: PeerInfo) -> None:
- if self._reconnect_tasks.get(peer) is not None:
- raise ServiceException(f"Peer {peer} already added")
-
- self._reconnect_tasks[peer] = start_reconnect_task(self._server, peer, self._log)
+ def add_peer(self, peer: UnresolvedPeerInfo) -> None:
+ self._connect_peers.add(peer)
async def setup_process_global_state(self) -> None:
# Being async forces this to be run from within an active event loop as is
@@ -242,10 +274,8 @@ def stop(self) -> None:
self.upnp.release(port)
self._log.info("Cancelling reconnect task")
- for task in self._reconnect_tasks.values():
- if task is not None:
- task.cancel()
- self._reconnect_tasks.clear()
+ if self._connect_peers_task is not None:
+ self._connect_peers_task.cancel()
self._log.info("Closing connections")
self._server.close_all()
self._node._close()
diff --git a/chia/server/start_timelord.py b/chia/server/start_timelord.py
--- a/chia/server/start_timelord.py
+++ b/chia/server/start_timelord.py
@@ -12,11 +12,10 @@
from chia.server.start_service import RpcInfo, Service, async_run
from chia.timelord.timelord import Timelord
from chia.timelord.timelord_api import TimelordAPI
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -35,9 +34,9 @@ def create_timelord_service(
) -> Service[Timelord]:
service_config = config[SERVICE_NAME]
- connect_peers = [
- PeerInfo(str(get_host_addr(service_config["full_node_peer"]["host"])), service_config["full_node_peer"]["port"])
- ]
+ connect_peers = {
+ UnresolvedPeerInfo(service_config["full_node_peer"]["host"], service_config["full_node_peer"]["port"])
+ }
overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
updated_constants = constants.replace_str_to_bytes(**overrides)
diff --git a/chia/server/start_wallet.py b/chia/server/start_wallet.py
--- a/chia/server/start_wallet.py
+++ b/chia/server/start_wallet.py
@@ -11,12 +11,11 @@
from chia.rpc.wallet_rpc_api import WalletRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.keychain import Keychain
-from chia.util.network import get_host_addr
from chia.util.task_timing import maybe_manage_task_instrumentation
from chia.wallet.wallet_node import WalletNode
@@ -50,13 +49,8 @@ def create_wallet_service(
)
peer_api = WalletNodeAPI(node)
fnp = service_config.get("full_node_peer")
+ connect_peers = set() if fnp is None else {UnresolvedPeerInfo(fnp["host"], fnp["port"])}
- if fnp:
- connect_peers = [
- PeerInfo(str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"])
- ]
- else:
- connect_peers = []
network_id = service_config["selected_network"]
rpc_port = service_config.get("rpc_port")
rpc_info: Optional[RpcInfo] = None
diff --git a/chia/simulator/setup_nodes.py b/chia/simulator/setup_nodes.py
--- a/chia/simulator/setup_nodes.py
+++ b/chia/simulator/setup_nodes.py
@@ -32,7 +32,7 @@
from chia.simulator.time_out_assert import time_out_assert_custom_interval
from chia.timelord.timelord import Timelord
from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.hash import std_hash
from chia.util.ints import uint16, uint32
from chia.util.keychain import Keychain
@@ -302,7 +302,7 @@ async def setup_farmer_multi_harvester(
]
farmer_service = await farmer_node_iterators[0].__anext__()
if start_services:
- farmer_peer = PeerInfo(block_tools.config["self_hostname"], uint16(farmer_service._server._port))
+ farmer_peer = UnresolvedPeerInfo(block_tools.config["self_hostname"], uint16(farmer_service._server._port))
else:
farmer_peer = None
harvester_node_iterators = []
@@ -432,7 +432,7 @@ async def setup_full_system_inner(
harvester_iter = setup_harvester(
shared_b_tools,
shared_b_tools.root_path / "harvester",
- PeerInfo(shared_b_tools.config["self_hostname"], farmer_service._server.get_port()),
+ UnresolvedPeerInfo(shared_b_tools.config["self_hostname"], farmer_service._server.get_port()),
consensus_constants,
)
vdf1_port = uint16(find_available_listen_port("vdf1"))
diff --git a/chia/simulator/setup_services.py b/chia/simulator/setup_services.py
--- a/chia/simulator/setup_services.py
+++ b/chia/simulator/setup_services.py
@@ -30,7 +30,7 @@
from chia.simulator.start_simulator import create_full_node_simulator_service
from chia.timelord.timelord import Timelord
from chia.timelord.timelord_launcher import kill_processes, spawn_process
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.bech32m import encode_puzzle_hash
from chia.util.config import config_path_for_filename, lock_and_load_config, save_config
from chia.util.ints import uint16
@@ -242,7 +242,7 @@ async def setup_wallet_node(
async def setup_harvester(
b_tools: BlockTools,
root_path: Path,
- farmer_peer: Optional[PeerInfo],
+ farmer_peer: Optional[UnresolvedPeerInfo],
consensus_constants: ConsensusConstants,
start_service: bool = True,
) -> AsyncGenerator[Service[Harvester], None]:
diff --git a/chia/types/peer_info.py b/chia/types/peer_info.py
--- a/chia/types/peer_info.py
+++ b/chia/types/peer_info.py
@@ -9,6 +9,12 @@
from chia.util.streamable import Streamable, streamable
+@dataclass(frozen=True)
+class UnresolvedPeerInfo:
+ host: str
+ port: uint16
+
+
# TODO, Replace unsafe_hash with frozen and drop the __init__ as soon as all PeerInfo call sites pass in an IPAddress.
@dataclass(unsafe_hash=True)
class PeerInfo:
| diff --git a/tests/farmer_harvester/test_farmer_harvester.py b/tests/farmer_harvester/test_farmer_harvester.py
--- a/tests/farmer_harvester/test_farmer_harvester.py
+++ b/tests/farmer_harvester/test_farmer_harvester.py
@@ -6,7 +6,7 @@
from chia.farmer.farmer import Farmer
from chia.simulator.time_out_assert import time_out_assert
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.keychain import generate_mnemonic
@@ -18,6 +18,7 @@ def farmer_is_started(farmer):
async def test_start_with_empty_keychain(farmer_one_harvester_not_started):
_, farmer_service, bt = farmer_one_harvester_not_started
farmer: Farmer = farmer_service._node
+ farmer_service.reconnect_retry_seconds = 1
# First remove all keys from the keychain
bt.local_keychain.delete_all_keys()
# Make sure the farmer service is not initialized yet
@@ -42,6 +43,9 @@ async def test_harvester_handshake(farmer_one_harvester_not_started):
harvester = harvester_service._node
farmer = farmer_service._node
+ farmer_service.reconnect_retry_seconds = 1
+ harvester_service.reconnect_retry_seconds = 1
+
def farmer_has_connections():
return len(farmer.server.get_connections()) > 0
@@ -60,7 +64,7 @@ async def handshake_done() -> bool:
# Start both services and wait a bit
await farmer_service.start()
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
# Handshake task should be started but the handshake should not be done
await time_out_assert(5, handshake_task_active, True)
assert not await handshake_done()
@@ -76,7 +80,7 @@ async def handshake_done() -> bool:
assert len(harvester.plot_manager.farmer_public_keys) == 0
# Re-start the harvester and make sure the handshake task gets started but the handshake still doesn't go through
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
await time_out_assert(5, handshake_task_active, True)
assert not await handshake_done()
# Stop the farmer and make sure the handshake_task doesn't block the shutdown
diff --git a/tests/plot_sync/test_plot_sync.py b/tests/plot_sync/test_plot_sync.py
--- a/tests/plot_sync/test_plot_sync.py
+++ b/tests/plot_sync/test_plot_sync.py
@@ -299,6 +299,7 @@ def new_test_dir(name: str, plot_list: List[Path]) -> Directory:
file.write(bytes(100))
harvester_services, farmer_service, bt = farmer_two_harvester_not_started
+ farmer_service.reconnect_retry_seconds = 1
farmer: Farmer = farmer_service._node
await farmer_service.start()
harvesters: List[Harvester] = [
diff --git a/tests/plot_sync/util.py b/tests/plot_sync/util.py
--- a/tests/plot_sync/util.py
+++ b/tests/plot_sync/util.py
@@ -13,7 +13,7 @@
from chia.server.start_service import Service
from chia.simulator.time_out_assert import time_out_assert
from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo
from chia.util.ints import uint16, uint64
@@ -40,8 +40,9 @@ async def start_harvester_service(harvester_service: Service[Harvester], farmer_
# Set the `last_refresh_time` of the plot manager to avoid initial plot loading
harvester: Harvester = harvester_service._node
harvester.plot_manager.last_refresh_time = time.time()
+ harvester_service.reconnect_retry_seconds = 1
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
harvester.plot_manager.stop_refreshing()
assert harvester.plot_sync_sender._sync_id == 0
| [BUG] Harvester looks up 'host' just once.
**Describe the bug**
The harvester looks up the host just once. If the dynamic IP at home changes, the harvester does not reconnect to the new IP address.
```
harvester:
chia_ssl_ca:
crt: config/ssl/ca/chia_ca.crt
key: config/ssl/ca/chia_ca.key
farmer_peer:
host: [remote DNS Name with changing IP]
port: 8447
```
**To Reproduce**
Steps to reproduce the behavior:
1. Set up a Full node with internet at home where the IP changes after each reconnect.
2. Set up a remote Farmer node to use an external DNS (dyndns) service for changing IPs, e.g. noip.com.
3. Change the IP.
4. The harvester will still be using the old IP hours later.
**Expected behavior**
The harvester looks up the IP every x minutes (or uses e.g. gethostbyaddr), notices the change, and uses the new IP address.
**Desktop**
- OS: Linux
- OS Version/Flavor: Debian
- CPU: AMD Ryzen 9 5950X
| 1.2.2 fixed it
can you just do `chia start harvester -r`?
1.2.2 did not fix it.
I could restart the harvesters, but this is manual. I think the software should use DNS, not IPs, for this.
This issue has been flagged as stale as there has been no activity on it in 14 days. If this issue is still affecting you and in need of review, please update it to keep it open.
Still not fixed
This issue has been flagged as stale as there has been no activity on it in 14 days. If this issue is still affecting you and in need of review, please update it to keep it open.
Still no one cares :(
This issue has been flagged as stale as there has been no activity on it in 14 days. If this issue is still affecting you and in need of review, please update it to keep it open.
😢
PRs are welcome, but this is a fairly esoteric setup and is not common in the community.
> I could restart the harvestors but this is manual. I think the software should use dns not IPs for this.
as @emlowe said
this is a very weird setup that only you are using. If you want to fix it you can, but it is very very very unlikely to be fixed by us as, again, it is a very weird setup.
Since you're on Linux, you could probably create a simple hack to detect IP change and restart the harvester.
See `/etc/dhcp/dhclient-enter-hooks.d/resolvconf`
This issue has been flagged as stale as there has been no activity on it in 14 days. If this issue is still affecting you and in need of review, please update it to keep it open.
This issue was automatically closed because it has been flagged as stale and subsequently passed 7 days with no further activity.
> > I could restart the harvestors but this is manual. I think the software should use dns not IPs for this.
>
> as @emlowe said this is a very wierd setup that only you are using. If you want to fix it you can but it is very very very unlikely to be fixed by us as again it is a very weird setup.
Excuse me? This is not a weird setup and it worked for me for a year until the DNS lookup broke a while ago. There is a reason the DNS exists and declaring that its used by no one is a bit short-sighted ...
If this really will stay on wontfix, why not remove usage of DNS at all or better make a monolithic client without all the farmer/harvester/fullnode/... modules? /s
The esoteric part isn't DNS - it's where your harvesters (presumably local network) are connecting to a Farmer on a _changing public internet IP_ (that is not local network, ie cloud). This is not a common configuration. The use of a remote Farmer itself is uncommon enough, and the majority of those people have their Farmer on a fixed public IP address.
Well, as long as end customers, who are supposed to carry a decentralised cryptocurrency, are offered contracts with 24-hourly disconnects by the providers, or business contracts with static IP are significantly more expensive, there are changing IP adresses and therefor I would not call this an esoteric use case. Even if this use case were not considered, the setting given in several places would be misleading. The use of 'localhost' and not '127.0.0.1' or e.g.: '::1' indicates a resolution of URLs and one expects this to work continuously. A sudden disconnection after a day or so seems like an error of the application and not an intended behaviour. These are just basic expectations on the OSI networking model ... | 2023-04-07T06:47:20Z | [] | [] |
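For reference, the patch above takes roughly the approach asked for here: the configured peer is kept as an `UnresolvedPeerInfo`, and the service's connect loop re-resolves the hostname whenever a connection attempt fails. A minimal sketch of the idea — it assumes a chia-blockchain install that includes this patch, and the hostname/port are placeholders:

```python
from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo
from chia.util.ints import uint16
from chia.util.network import get_host_addr

# The farmer_peer host from config stays unresolved instead of being looked up once at startup.
farmer_peer = UnresolvedPeerInfo("localhost", uint16(8447))  # in practice a dyndns hostname

# Each retry in _connect_peers_task_handler resolves again, so a dyndns record that
# changed since the last attempt produces a fresh PeerInfo to connect to.
resolved = PeerInfo(get_host_addr(farmer_peer.host, prefer_ipv6=False), farmer_peer.port)
```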
Chia-Network/chia-blockchain | 15,337 | Chia-Network__chia-blockchain-15337 | [
"13393"
] | 20984a9783020466208f219340455d9eb8f785cc | diff --git a/benchmarks/mempool-long-lived.py b/benchmarks/mempool-long-lived.py
--- a/benchmarks/mempool-long-lived.py
+++ b/benchmarks/mempool-long-lived.py
@@ -81,7 +81,6 @@ def fake_block_record(block_height: uint32, timestamp: uint64) -> BenchBlockReco
async def run_mempool_benchmark() -> None:
-
coin_records: Dict[bytes32, CoinRecord] = {}
async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
diff --git a/benchmarks/mempool.py b/benchmarks/mempool.py
--- a/benchmarks/mempool.py
+++ b/benchmarks/mempool.py
@@ -2,6 +2,7 @@
import asyncio
import cProfile
+import sys
from contextlib import contextmanager
from dataclasses import dataclass
from subprocess import check_call
@@ -19,6 +20,7 @@
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.types.spend_bundle import SpendBundle
from chia.types.spend_bundle_conditions import Spend, SpendBundleConditions
+from chia.util.chunks import chunks
from chia.util.ints import uint32, uint64
NUM_ITERS = 200
@@ -27,6 +29,9 @@
@contextmanager
def enable_profiler(profile: bool, name: str) -> Iterator[None]:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
if not profile:
yield
return
@@ -89,6 +94,10 @@ async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
# these spend the same coins as spend_bundles but with a higher fee
replacement_spend_bundles: List[List[SpendBundle]] = []
+ # these spend the same coins as spend_bundles, but they are organized in
+ # much larger bundles
+ large_spend_bundles: List[List[SpendBundle]] = []
+
timestamp = uint64(1631794488)
height = uint32(1)
@@ -133,6 +142,19 @@ async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
bundles.append(tx)
replacement_spend_bundles.append(bundles)
+ bundles = []
+ print(" large spend bundles")
+ for coins in chunks(unspent, 200):
+ print(f"{len(coins)} coins")
+ tx = SpendBundle.aggregate(
+ [
+ wt.generate_signed_transaction(uint64(c.amount // 2), wt.get_new_puzzlehash(), c, fee=peer + idx)
+ for c in coins
+ ]
+ )
+ bundles.append(tx)
+ large_spend_bundles.append(bundles)
+
start_height = height
for single_threaded in [False, True]:
if single_threaded:
@@ -157,6 +179,25 @@ async def add_spend_bundles(spend_bundles: List[SpendBundle]) -> None:
suffix = "st" if single_threaded else "mt"
+ print("\nProfiling add_spend_bundle() with large bundles")
+ total_bundles = 0
+ tasks = []
+ with enable_profiler(True, f"add-large-{suffix}"):
+ start = monotonic()
+ for peer in range(NUM_PEERS):
+ total_bundles += len(large_spend_bundles[peer])
+ tasks.append(asyncio.create_task(add_spend_bundles(large_spend_bundles[peer])))
+ await asyncio.gather(*tasks)
+ stop = monotonic()
+ print(f" time: {stop - start:0.4f}s")
+ print(f" per call: {(stop - start) / total_bundles * 1000:0.2f}ms")
+
+ mempool = MempoolManager(get_coin_record, DEFAULT_CONSTANTS, single_threaded=single_threaded)
+
+ height = start_height
+ rec = fake_block_record(height, timestamp)
+ await mempool.new_peak(rec, None)
+
print("\nProfiling add_spend_bundle()")
total_bundles = 0
tasks = []
diff --git a/chia/__main__.py b/chia/__main__.py
new file mode 100644
--- /dev/null
+++ b/chia/__main__.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from chia.cmds.chia import main
+
+main()
diff --git a/chia/clvm/singleton.py b/chia/clvm/singleton.py
--- a/chia/clvm/singleton.py
+++ b/chia/clvm/singleton.py
@@ -2,6 +2,6 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clvm")
-SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer.clvm")
-SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clvm")
+P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clsp")
+SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer.clsp")
+SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clsp")
diff --git a/chia/clvm/spend_sim.py b/chia/clvm/spend_sim.py
--- a/chia/clvm/spend_sim.py
+++ b/chia/clvm/spend_sim.py
@@ -223,7 +223,7 @@ async def farm_block(
) -> Tuple[List[Coin], List[Coin]]:
# Fees get calculated
fees = uint64(0)
- for item in self.mempool_manager.mempool.all_spends():
+ for item in self.mempool_manager.mempool.all_items():
fees = uint64(fees + item.fee)
# Rewards get created
@@ -254,13 +254,13 @@ async def farm_block(
result = self.mempool_manager.create_bundle_from_mempool(peak.header_hash, item_inclusion_filter)
if result is not None:
- bundle, additions, removals = result
+ bundle, additions = result
generator_bundle = bundle
return_additions = additions
- return_removals = removals
+ return_removals = bundle.removals()
await self.coin_store._add_coin_records([self.new_coin_record(addition) for addition in additions])
- await self.coin_store._set_spent([r.name() for r in removals], uint32(self.block_height + 1))
+ await self.coin_store._set_spent([r.name() for r in return_removals], uint32(self.block_height + 1))
# SimBlockRecord is created
generator: Optional[BlockGenerator] = await self.generate_transaction_generator(generator_bundle)
@@ -412,26 +412,21 @@ async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[C
removals: List[CoinRecord] = await self.service.coin_store.get_coins_removed_at_height(block_height)
return additions, removals
- async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Optional[CoinSpend]:
+ async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> CoinSpend:
filtered_generators = list(filter(lambda block: block.height == height, self.service.blocks))
# real consideration should be made for the None cases instead of just hint ignoring
generator: BlockGenerator = filtered_generators[0].transactions_generator # type: ignore[assignment]
coin_record = await self.service.coin_store.get_coin_record(coin_id)
assert coin_record is not None
- error, puzzle, solution = get_puzzle_and_solution_for_coin(generator, coin_record.coin)
- if error:
- return None
- else:
- assert puzzle is not None
- assert solution is not None
- return CoinSpend(coin_record.coin, puzzle, solution)
+ spend_info = get_puzzle_and_solution_for_coin(generator, coin_record.coin)
+ return CoinSpend(coin_record.coin, spend_info.puzzle, spend_info.solution)
async def get_all_mempool_tx_ids(self) -> List[bytes32]:
- return self.service.mempool_manager.mempool.all_spend_ids()
+ return self.service.mempool_manager.mempool.all_item_ids()
async def get_all_mempool_items(self) -> Dict[bytes32, MempoolItem]:
spends = {}
- for item in self.service.mempool_manager.mempool.all_spends():
+ for item in self.service.mempool_manager.mempool.all_items():
spends[item.name] = item
return spends
diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py
--- a/chia/cmds/chia.py
+++ b/chia/cmds/chia.py
@@ -11,6 +11,7 @@
from chia.cmds.configure import configure_cmd
from chia.cmds.data import data_cmd
from chia.cmds.db import db_cmd
+from chia.cmds.dev import dev_cmd
from chia.cmds.farm import farm_cmd
from chia.cmds.init import init_cmd
from chia.cmds.keys import keys_cmd
@@ -83,12 +84,12 @@ def cli(
check_ssl(Path(root_path))
-@cli.command("version", short_help="Show chia version")
+@cli.command("version", help="Show chia version")
def version_cmd() -> None:
print(__version__)
-@cli.command("run_daemon", short_help="Runs chia daemon")
+@cli.command("run_daemon", help="Runs chia daemon")
@click.option(
"--wait-for-unlock",
help="If the keyring is passphrase-protected, the daemon will wait for an unlock command before accessing keys",
@@ -127,6 +128,7 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None:
cli.add_command(passphrase_cmd)
cli.add_command(beta_cmd)
cli.add_command(completion)
+cli.add_command(dev_cmd)
def main() -> None:
diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py
--- a/chia/cmds/cmds_util.py
+++ b/chia/cmds/cmds_util.py
@@ -15,6 +15,7 @@
from chia.rpc.harvester_rpc_client import HarvesterRpcClient
from chia.rpc.rpc_client import RpcClient
from chia.rpc.wallet_rpc_client import WalletRpcClient
+from chia.simulator.simulator_full_node_rpc_client import SimulatorFullNodeRpcClient
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.mempool_submission_status import MempoolSubmissionStatus
from chia.util.config import load_config
@@ -29,6 +30,7 @@
"full_node": FullNodeRpcClient,
"harvester": HarvesterRpcClient,
"data_layer": DataLayerRpcClient,
+ "simulator": SimulatorFullNodeRpcClient,
}
node_config_section_names: Dict[Type[RpcClient], str] = {
@@ -37,6 +39,7 @@
FullNodeRpcClient: "full_node",
HarvesterRpcClient: "harvester",
DataLayerRpcClient: "data_layer",
+ SimulatorFullNodeRpcClient: "full_node",
}
@@ -56,22 +59,18 @@ async def validate_client_connection(
rpc_client: RpcClient,
node_type: str,
rpc_port: int,
- root_path: Path,
- fingerprint: Optional[int],
- login_to_wallet: bool,
-) -> Optional[int]:
+ consume_errors: bool = True,
+) -> bool:
+ connected: bool = True
try:
await rpc_client.healthz()
- if type(rpc_client) == WalletRpcClient and login_to_wallet:
- fingerprint = await get_wallet(root_path, rpc_client, fingerprint)
- if fingerprint is None:
- rpc_client.close()
except ClientConnectorError:
+ if not consume_errors:
+ raise
+ connected = False
print(f"Connection error. Check if {node_type.replace('_', ' ')} rpc is running at {rpc_port}")
print(f"This is normal if {node_type.replace('_', ' ')} is still starting up")
- rpc_client.close()
- await rpc_client.await_closed() # if close is not already called this does nothing
- return fingerprint
+ return connected
@asynccontextmanager
@@ -79,9 +78,8 @@ async def get_any_service_client(
client_type: Type[_T_RpcClient],
rpc_port: Optional[int] = None,
root_path: Path = DEFAULT_ROOT_PATH,
- fingerprint: Optional[int] = None,
- login_to_wallet: bool = True,
-) -> AsyncIterator[Tuple[Optional[_T_RpcClient], Dict[str, Any], Optional[int]]]:
+ consume_errors: bool = True,
+) -> AsyncIterator[Tuple[Optional[_T_RpcClient], Dict[str, Any]]]:
"""
Yields a tuple with a RpcClient for the applicable node type a dictionary of the node's configuration,
and a fingerprint if applicable. However, if connecting to the node fails then we will return None for
@@ -100,16 +98,15 @@ async def get_any_service_client(
# select node client type based on string
node_client = await client_type.create(self_hostname, uint16(rpc_port), root_path, config)
try:
- # check if we can connect to node, and if we can then validate
- # fingerprint access, otherwise return fingerprint and shutdown client
- fingerprint = await validate_client_connection(
- node_client, node_type, rpc_port, root_path, fingerprint, login_to_wallet
- )
- if node_client.session.closed:
- yield None, config, fingerprint
+ # check if we can connect to node
+ connected = await validate_client_connection(node_client, node_type, rpc_port, consume_errors)
+ if connected:
+ yield node_client, config
else:
- yield node_client, config, fingerprint
+ yield None, config
except Exception as e: # this is only here to make the errors more user-friendly.
+ if not consume_errors:
+ raise
print(f"Exception from '{node_type}' {e}:\n{traceback.format_exc()}")
finally:
@@ -216,11 +213,12 @@ async def execute_with_wallet(
extra_params: Dict[str, Any],
function: Callable[[Dict[str, Any], WalletRpcClient, int], Awaitable[None]],
) -> None:
- async with get_any_service_client(WalletRpcClient, wallet_rpc_port, fingerprint=fingerprint) as (
- wallet_client,
- _,
- new_fp,
- ):
- if wallet_client is not None:
- assert new_fp is not None # wallet only sanity check
- await function(extra_params, wallet_client, new_fp)
+ async with get_any_service_client(WalletRpcClient, wallet_rpc_port) as (wallet_client, _):
+ if wallet_client is None:
+ return
+
+ new_fp = await get_wallet(DEFAULT_ROOT_PATH, wallet_client, fingerprint)
+ if new_fp is None:
+ return
+
+ await function(extra_params, wallet_client, new_fp)
diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py
--- a/chia/cmds/coins.py
+++ b/chia/cmds/coins.py
@@ -9,13 +9,13 @@
from chia.util.config import load_config, selected_network_address_prefix
-@click.group("coins", short_help="Manage your wallets coins")
+@click.group("coins", help="Manage your wallets coins")
@click.pass_context
def coins_cmd(ctx: click.Context) -> None:
pass
-@coins_cmd.command("list", short_help="List all coins")
+@coins_cmd.command("list", help="List all coins")
@click.option(
"-p",
"--wallet-rpc-port",
@@ -23,7 +23,7 @@ def coins_cmd(ctx: click.Context) -> None:
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option("-u", "--show-unconfirmed", help="Separately display unconfirmed coins.", is_flag=True)
@click.option(
@@ -85,7 +85,7 @@ def list_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, async_list))
-@coins_cmd.command("combine", short_help="Combine dust coins")
+@coins_cmd.command("combine", help="Combine dust coins")
@click.option(
"-p",
"--wallet-rpc-port",
@@ -93,7 +93,7 @@ def list_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option(
"-a",
@@ -179,7 +179,7 @@ def combine_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, async_combine))
-@coins_cmd.command("split", short_help="Split up larger coins")
+@coins_cmd.command("split", help="Split up larger coins")
@click.option(
"-p",
"--wallet-rpc-port",
@@ -187,7 +187,7 @@ def combine_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option(
"-n",
diff --git a/chia/cmds/completion.py b/chia/cmds/completion.py
--- a/chia/cmds/completion.py
+++ b/chia/cmds/completion.py
@@ -16,13 +16,13 @@
@click.group(
- short_help="Generate shell completion",
+ help="Generate shell completion",
)
def completion() -> None:
pass
-@completion.command(short_help="Generate shell completion code")
+@completion.command(help="Generate shell completion code")
@click.option(
"-s",
"--shell",
diff --git a/chia/cmds/configure.py b/chia/cmds/configure.py
--- a/chia/cmds/configure.py
+++ b/chia/cmds/configure.py
@@ -207,7 +207,7 @@ def configure(
save_config(root_path, "config.yaml", config)
-@click.command("configure", short_help="Modify configuration", no_args_is_help=True)
+@click.command("configure", help="Modify configuration", no_args_is_help=True)
@click.option(
"--testnet",
"-t",
diff --git a/chia/cmds/data.py b/chia/cmds/data.py
--- a/chia/cmds/data.py
+++ b/chia/cmds/data.py
@@ -28,7 +28,7 @@ def run(coro: Coroutine[Any, Any, Optional[Dict[str, Any]]]) -> None:
# raise click.ClickException(message=f"query unsuccessful, response: {response}")
-@click.group("data", short_help="Manage your data")
+@click.group("data", help="Manage your data")
def data_cmd() -> None:
pass
@@ -103,7 +103,7 @@ def create_fee_option() -> Callable[[FC], FC]:
)
-@data_cmd.command("create_data_store", short_help="Create a new data store")
+@data_cmd.command("create_data_store", help="Create a new data store")
@create_rpc_port_option()
@create_fee_option()
def create_data_store(
@@ -115,7 +115,7 @@ def create_data_store(
run(create_data_store_cmd(data_rpc_port, fee))
-@data_cmd.command("get_value", short_help="Get the value for a given key and store")
+@data_cmd.command("get_value", help="Get the value for a given key and store")
@create_data_store_id_option()
@create_key_option()
@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False)
@@ -131,7 +131,7 @@ def get_value(
run(get_value_cmd(data_rpc_port, id, key_string, root_hash))
-@data_cmd.command("update_data_store", short_help="Update a store by providing the changelist operations")
+@data_cmd.command("update_data_store", help="Update a store by providing the changelist operations")
@create_data_store_id_option()
@create_changelist_option()
@create_rpc_port_option()
@@ -147,7 +147,7 @@ def update_data_store(
run(update_data_store_cmd(rpc_port=data_rpc_port, store_id=id, changelist=json.loads(changelist_string), fee=fee))
-@data_cmd.command("get_keys", short_help="Get all keys for a given store")
+@data_cmd.command("get_keys", help="Get all keys for a given store")
@create_data_store_id_option()
@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False)
@create_rpc_port_option()
@@ -161,7 +161,7 @@ def get_keys(
run(get_keys_cmd(data_rpc_port, id, root_hash))
-@data_cmd.command("get_keys_values", short_help="Get all keys and values for a given store")
+@data_cmd.command("get_keys_values", help="Get all keys and values for a given store")
@create_data_store_id_option()
@click.option("-r", "--root_hash", help="The hexadecimal root hash", type=str, required=False)
@create_rpc_port_option()
@@ -175,7 +175,7 @@ def get_keys_values(
run(get_keys_values_cmd(data_rpc_port, id, root_hash))
-@data_cmd.command("get_root", short_help="Get the published root hash value for a given store")
+@data_cmd.command("get_root", help="Get the published root hash value for a given store")
@create_data_store_id_option()
@create_rpc_port_option()
def get_root(
@@ -187,7 +187,7 @@ def get_root(
run(get_root_cmd(rpc_port=data_rpc_port, store_id=id))
-@data_cmd.command("subscribe", short_help="Subscribe to a store")
+@data_cmd.command("subscribe", help="Subscribe to a store")
@create_data_store_id_option()
@click.option(
"-u",
@@ -208,7 +208,7 @@ def subscribe(
run(subscribe_cmd(rpc_port=data_rpc_port, store_id=id, urls=urls))
-@data_cmd.command("remove_subscription", short_help="Remove server urls that are added via subscribing to urls")
+@data_cmd.command("remove_subscription", help="Remove server urls that are added via subscribing to urls")
@create_data_store_id_option()
@click.option("-u", "--url", "urls", help="Server urls to remove", type=str, multiple=True)
@create_rpc_port_option()
@@ -222,7 +222,7 @@ def remove_subscription(
run(remove_subscriptions_cmd(rpc_port=data_rpc_port, store_id=id, urls=urls))
-@data_cmd.command("unsubscribe", short_help="Completely untrack a store")
+@data_cmd.command("unsubscribe", help="Completely untrack a store")
@create_data_store_id_option()
@create_rpc_port_option()
def unsubscribe(
@@ -235,7 +235,7 @@ def unsubscribe(
@data_cmd.command(
- "get_kv_diff", short_help="Get the inserted and deleted keys and values between an initial and a final hash"
+ "get_kv_diff", help="Get the inserted and deleted keys and values between an initial and a final hash"
)
@create_data_store_id_option()
@click.option("-hash_1", "--hash_1", help="Initial hash", type=str)
@@ -252,7 +252,7 @@ def get_kv_diff(
run(get_kv_diff_cmd(rpc_port=data_rpc_port, store_id=id, hash_1=hash_1, hash_2=hash_2))
-@data_cmd.command("get_root_history", short_help="Get all changes of a singleton")
+@data_cmd.command("get_root_history", help="Get all changes of a singleton")
@create_data_store_id_option()
@create_rpc_port_option()
def get_root_history(
@@ -264,7 +264,7 @@ def get_root_history(
run(get_root_history_cmd(rpc_port=data_rpc_port, store_id=id))
-@data_cmd.command("add_missing_files", short_help="Manually reconstruct server files from the data layer database")
+@data_cmd.command("add_missing_files", help="Manually reconstruct server files from the data layer database")
@click.option(
"-i",
"--ids",
@@ -294,7 +294,7 @@ def add_missing_files(ids: Optional[str], overwrite: bool, foldername: Optional[
)
-@data_cmd.command("add_mirror", short_help="Publish mirror urls on chain")
+@data_cmd.command("add_mirror", help="Publish mirror urls on chain")
@click.option("-i", "--id", help="Store id", type=str, required=True)
@click.option(
"-a", "--amount", help="Amount to spend for this mirror, in mojos", type=int, default=0, show_default=True
@@ -323,7 +323,7 @@ def add_mirror(id: str, amount: int, urls: List[str], fee: Optional[str], data_r
)
-@data_cmd.command("delete_mirror", short_help="Delete an owned mirror by its coin id")
+@data_cmd.command("delete_mirror", help="Delete an owned mirror by its coin id")
@click.option("-c", "--coin_id", help="Coin id", type=str, required=True)
@create_fee_option()
@create_rpc_port_option()
@@ -339,7 +339,7 @@ def delete_mirror(coin_id: str, fee: Optional[str], data_rpc_port: int) -> None:
)
-@data_cmd.command("get_mirrors", short_help="Get a list of all mirrors for a given store")
+@data_cmd.command("get_mirrors", help="Get a list of all mirrors for a given store")
@click.option("-i", "--id", help="Store id", type=str, required=True)
@create_rpc_port_option()
def get_mirrors(id: str, data_rpc_port: int) -> None:
@@ -353,7 +353,7 @@ def get_mirrors(id: str, data_rpc_port: int) -> None:
)
-@data_cmd.command("get_subscriptions", short_help="Get subscribed stores, including the owned stores")
+@data_cmd.command("get_subscriptions", help="Get subscribed stores, including the owned stores")
@create_rpc_port_option()
def get_subscriptions(data_rpc_port: int) -> None:
from chia.cmds.data_funcs import get_subscriptions_cmd
@@ -365,7 +365,7 @@ def get_subscriptions(data_rpc_port: int) -> None:
)
-@data_cmd.command("get_owned_stores", short_help="Get owned stores")
+@data_cmd.command("get_owned_stores", help="Get owned stores")
@create_rpc_port_option()
def get_owned_stores(data_rpc_port: int) -> None:
from chia.cmds.data_funcs import get_owned_stores_cmd
@@ -377,7 +377,7 @@ def get_owned_stores(data_rpc_port: int) -> None:
)
-@data_cmd.command("get_sync_status", short_help="Get locally stored root compared to the root of the singleton")
+@data_cmd.command("get_sync_status", help="Get locally stored root compared to the root of the singleton")
@create_data_store_id_option()
@create_rpc_port_option()
def get_sync_status(
@@ -387,3 +387,18 @@ def get_sync_status(
from chia.cmds.data_funcs import get_sync_status_cmd
run(get_sync_status_cmd(rpc_port=data_rpc_port, store_id=id))
+
+
+@data_cmd.group("plugins", help="Get information about configured uploader/downloader plugins")
+def plugins_cmd() -> None:
+ pass
+
+
+@plugins_cmd.command("check", help="Calls the plugin_info endpoint on all configured plugins")
+@create_rpc_port_option()
+def check_plugins(
+ data_rpc_port: int,
+) -> None:
+ from chia.cmds.data_funcs import check_plugins_cmd
+
+ run(check_plugins_cmd(rpc_port=data_rpc_port))
diff --git a/chia/cmds/data_funcs.py b/chia/cmds/data_funcs.py
--- a/chia/cmds/data_funcs.py
+++ b/chia/cmds/data_funcs.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import json
from decimal import Decimal
from pathlib import Path
from typing import Dict, List, Optional
@@ -14,7 +15,7 @@
async def create_data_store_cmd(rpc_port: Optional[int], fee: Optional[str]) -> None:
final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"]))
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.create_data_store(fee=final_fee)
print(res)
@@ -24,7 +25,7 @@ async def get_value_cmd(rpc_port: Optional[int], store_id: str, key: str, root_h
store_id_bytes = bytes32.from_hexstr(store_id)
key_bytes = hexstr_to_bytes(key)
root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_value(store_id=store_id_bytes, key=key_bytes, root_hash=root_hash_bytes)
print(res)
@@ -38,7 +39,7 @@ async def update_data_store_cmd(
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"]))
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.update_data_store(store_id=store_id_bytes, changelist=changelist, fee=final_fee)
print(res)
@@ -51,7 +52,7 @@ async def get_keys_cmd(
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_keys(store_id=store_id_bytes, root_hash=root_hash_bytes)
print(res)
@@ -64,7 +65,7 @@ async def get_keys_values_cmd(
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
root_hash_bytes = None if root_hash is None else bytes32.from_hexstr(root_hash)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_keys_values(store_id=store_id_bytes, root_hash=root_hash_bytes)
print(res)
@@ -75,7 +76,7 @@ async def get_root_cmd(
store_id: str,
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_root(store_id=store_id_bytes)
print(res)
@@ -87,7 +88,7 @@ async def subscribe_cmd(
urls: List[str],
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.subscribe(store_id=store_id_bytes, urls=urls)
print(res)
@@ -98,7 +99,7 @@ async def unsubscribe_cmd(
store_id: str,
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.unsubscribe(store_id=store_id_bytes)
print(res)
@@ -110,7 +111,7 @@ async def remove_subscriptions_cmd(
urls: List[str],
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.remove_subscriptions(store_id=store_id_bytes, urls=urls)
print(res)
@@ -125,7 +126,7 @@ async def get_kv_diff_cmd(
store_id_bytes = bytes32.from_hexstr(store_id)
hash_1_bytes = bytes32.from_hexstr(hash_1)
hash_2_bytes = bytes32.from_hexstr(hash_2)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_kv_diff(store_id=store_id_bytes, hash_1=hash_1_bytes, hash_2=hash_2_bytes)
print(res)
@@ -136,7 +137,7 @@ async def get_root_history_cmd(
store_id: str,
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_root_history(store_id=store_id_bytes)
print(res)
@@ -145,7 +146,7 @@ async def get_root_history_cmd(
async def add_missing_files_cmd(
rpc_port: Optional[int], ids: Optional[List[str]], overwrite: bool, foldername: Optional[Path]
) -> None:
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.add_missing_files(
store_ids=(None if ids is None else [bytes32.from_hexstr(id) for id in ids]),
@@ -160,7 +161,7 @@ async def add_mirror_cmd(
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"]))
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.add_mirror(
store_id=store_id_bytes,
@@ -174,7 +175,7 @@ async def add_mirror_cmd(
async def delete_mirror_cmd(rpc_port: Optional[int], coin_id: str, fee: Optional[str]) -> None:
coin_id_bytes = bytes32.from_hexstr(coin_id)
final_fee = None if fee is None else uint64(int(Decimal(fee) * units["chia"]))
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.delete_mirror(
coin_id=coin_id_bytes,
@@ -185,21 +186,21 @@ async def delete_mirror_cmd(rpc_port: Optional[int], coin_id: str, fee: Optional
async def get_mirrors_cmd(rpc_port: Optional[int], store_id: str) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_mirrors(store_id=store_id_bytes)
print(res)
async def get_subscriptions_cmd(rpc_port: Optional[int]) -> None:
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_subscriptions()
print(res)
async def get_owned_stores_cmd(rpc_port: Optional[int]) -> None:
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_owned_stores()
print(res)
@@ -210,7 +211,14 @@ async def get_sync_status_cmd(
store_id: str,
) -> None:
store_id_bytes = bytes32.from_hexstr(store_id)
- async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _, _):
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
if client is not None:
res = await client.get_sync_status(store_id=store_id_bytes)
print(res)
+
+
+async def check_plugins_cmd(rpc_port: Optional[int]) -> None:
+ async with get_any_service_client(DataLayerRpcClient, rpc_port) as (client, _):
+ if client is not None:
+ res = await client.check_plugins()
+ print(json.dumps(res, indent=4, sort_keys=True))
diff --git a/chia/cmds/db.py b/chia/cmds/db.py
--- a/chia/cmds/db.py
+++ b/chia/cmds/db.py
@@ -10,12 +10,12 @@
from chia.cmds.db_validate_func import db_validate_func
-@click.group("db", short_help="Manage the blockchain database")
+@click.group("db", help="Manage the blockchain database")
def db_cmd() -> None:
pass
-@db_cmd.command("upgrade", short_help="upgrade a v1 database to v2")
+@db_cmd.command("upgrade", help="upgrade a v1 database to v2")
@click.option("--input", "in_db_path", default=None, type=click.Path(), help="specify input database file")
@click.option("--output", "out_db_path", default=None, type=click.Path(), help="specify output database file")
@click.option(
@@ -51,7 +51,7 @@ def db_upgrade_cmd(
print(f"FAILED: {e}")
-@db_cmd.command("validate", short_help="validate the (v2) blockchain database. Does not verify proofs")
+@db_cmd.command("validate", help="validate the (v2) blockchain database. Does not verify proofs")
@click.option("--db", "in_db_path", default=None, type=click.Path(), help="Specifies which database file to validate")
@click.option(
"--validate-blocks",
@@ -71,7 +71,7 @@ def db_validate_cmd(ctx: click.Context, in_db_path: Optional[str], validate_bloc
print(f"FAILED: {e}")
-@db_cmd.command("backup", short_help="backup the blockchain database using VACUUM INTO command")
+@db_cmd.command("backup", help="backup the blockchain database using VACUUM INTO command")
@click.option("--backup_file", "db_backup_file", default=None, type=click.Path(), help="Specifies the backup file")
@click.option("--no_indexes", default=False, is_flag=True, help="Create backup without indexes")
@click.pass_context
diff --git a/chia/cmds/dev.py b/chia/cmds/dev.py
new file mode 100644
--- /dev/null
+++ b/chia/cmds/dev.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+import click
+
+from chia.cmds.sim import sim_cmd
+
+
+@click.group("dev", help="Developer commands and tools")
+@click.pass_context
+def dev_cmd(ctx: click.Context) -> None:
+ pass
+
+
+dev_cmd.add_command(sim_cmd)
diff --git a/chia/cmds/farm.py b/chia/cmds/farm.py
--- a/chia/cmds/farm.py
+++ b/chia/cmds/farm.py
@@ -5,12 +5,12 @@
import click
-@click.group("farm", short_help="Manage your farm")
+@click.group("farm", help="Manage your farm")
def farm_cmd() -> None:
pass
-@farm_cmd.command("summary", short_help="Summary of farming information")
+@farm_cmd.command("summary", help="Summary of farming information")
@click.option(
"-p",
"--rpc-port",
@@ -62,7 +62,7 @@ def summary_cmd(
asyncio.run(summary(rpc_port, wallet_rpc_port, harvester_rpc_port, farmer_rpc_port))
-@farm_cmd.command("challenges", short_help="Show the latest challenges")
+@farm_cmd.command("challenges", help="Show the latest challenges")
@click.option(
"-fp",
"--farmer-rpc-port",
diff --git a/chia/cmds/farm_funcs.py b/chia/cmds/farm_funcs.py
--- a/chia/cmds/farm_funcs.py
+++ b/chia/cmds/farm_funcs.py
@@ -15,24 +15,21 @@
async def get_harvesters_summary(farmer_rpc_port: Optional[int]) -> Optional[Dict[str, Any]]:
- async with get_any_service_client(FarmerRpcClient, farmer_rpc_port) as node_config_fp:
- farmer_client, _, _ = node_config_fp
+ async with get_any_service_client(FarmerRpcClient, farmer_rpc_port) as (farmer_client, _):
if farmer_client is not None:
return await farmer_client.get_harvesters_summary()
return None
async def get_blockchain_state(rpc_port: Optional[int]) -> Optional[Dict[str, Any]]:
- async with get_any_service_client(FullNodeRpcClient, rpc_port) as node_config_fp:
- client, _, _ = node_config_fp
+ async with get_any_service_client(FullNodeRpcClient, rpc_port) as (client, _):
if client is not None:
return await client.get_blockchain_state()
return None
async def get_average_block_time(rpc_port: Optional[int]) -> float:
- async with get_any_service_client(FullNodeRpcClient, rpc_port) as node_config_fp:
- client, _, _ = node_config_fp
+ async with get_any_service_client(FullNodeRpcClient, rpc_port) as (client, _):
if client is not None:
blocks_to_compare = 500
blockchain_state = await client.get_blockchain_state()
@@ -55,16 +52,14 @@ async def get_average_block_time(rpc_port: Optional[int]) -> float:
async def get_wallets_stats(wallet_rpc_port: Optional[int]) -> Optional[Dict[str, Any]]:
- async with get_any_service_client(WalletRpcClient, wallet_rpc_port, login_to_wallet=False) as node_config_fp:
- wallet_client, _, _ = node_config_fp
+ async with get_any_service_client(WalletRpcClient, wallet_rpc_port) as (wallet_client, _):
if wallet_client is not None:
return await wallet_client.get_farmed_amount()
return None
async def get_challenges(farmer_rpc_port: Optional[int]) -> Optional[List[Dict[str, Any]]]:
- async with get_any_service_client(FarmerRpcClient, farmer_rpc_port) as node_config_fp:
- farmer_client, _, _ = node_config_fp
+ async with get_any_service_client(FarmerRpcClient, farmer_rpc_port) as (farmer_client, _):
if farmer_client is not None:
return await farmer_client.get_signage_points()
return None
diff --git a/chia/cmds/init.py b/chia/cmds/init.py
--- a/chia/cmds/init.py
+++ b/chia/cmds/init.py
@@ -3,7 +3,7 @@
import click
-@click.command("init", short_help="Create or migrate the configuration")
+@click.command("init", help="Create or migrate the configuration")
@click.option(
"--create-certs",
"-c",
diff --git a/chia/cmds/keys.py b/chia/cmds/keys.py
--- a/chia/cmds/keys.py
+++ b/chia/cmds/keys.py
@@ -5,7 +5,7 @@
import click
-@click.group("keys", short_help="Manage your keys")
+@click.group("keys", help="Manage your keys")
@click.pass_context
def keys_cmd(ctx: click.Context) -> None:
"""Create, delete, view and use your key pairs"""
@@ -16,7 +16,7 @@ def keys_cmd(ctx: click.Context) -> None:
raise RuntimeError("Please initialize (or migrate) your config directory with chia init")
-@keys_cmd.command("generate", short_help="Generates and adds a key to keychain")
+@keys_cmd.command("generate", help="Generates and adds a key to keychain")
@click.option(
"--label",
"-l",
@@ -34,7 +34,7 @@ def generate_cmd(ctx: click.Context, label: Optional[str]) -> None:
check_keys(ctx.obj["root_path"])
-@keys_cmd.command("show", short_help="Displays all the keys in keychain or the key with the given fingerprint")
+@keys_cmd.command("show", help="Displays all the keys in keychain or the key with the given fingerprint")
@click.option(
"--show-mnemonic-seed", help="Show the mnemonic seed of the keys", default=False, show_default=True, is_flag=True
)
@@ -78,7 +78,7 @@ def show_cmd(
show_keys(ctx.obj["root_path"], show_mnemonic_seed, non_observer_derivation, json, fingerprint)
-@keys_cmd.command("add", short_help="Add a private key by mnemonic")
+@keys_cmd.command("add", help="Add a private key by mnemonic")
@click.option(
"--filename",
"-f",
@@ -110,19 +110,19 @@ def add_cmd(ctx: click.Context, filename: str, label: Optional[str]) -> None:
check_keys(ctx.obj["root_path"])
-@keys_cmd.group("label", short_help="Manage your key labels")
+@keys_cmd.group("label", help="Manage your key labels")
def label_cmd() -> None:
pass
-@label_cmd.command("show", short_help="Show the labels of all available keys")
+@label_cmd.command("show", help="Show the labels of all available keys")
def show_label_cmd() -> None:
from .keys_funcs import show_all_key_labels
show_all_key_labels()
-@label_cmd.command("set", short_help="Set the label of a key")
+@label_cmd.command("set", help="Set the label of a key")
@click.option(
"--fingerprint",
"-f",
@@ -143,7 +143,7 @@ def set_label_cmd(fingerprint: int, label: str) -> None:
set_key_label(fingerprint, label)
-@label_cmd.command("delete", short_help="Delete the label of a key")
+@label_cmd.command("delete", help="Delete the label of a key")
@click.option(
"--fingerprint",
"-f",
@@ -157,7 +157,7 @@ def delete_label_cmd(fingerprint: int) -> None:
delete_key_label(fingerprint)
-@keys_cmd.command("delete", short_help="Delete a key by its pk fingerprint in hex form")
+@keys_cmd.command("delete", help="Delete a key by its pk fingerprint in hex form")
@click.option(
"--fingerprint",
"-f",
@@ -175,21 +175,21 @@ def delete_cmd(ctx: click.Context, fingerprint: int) -> None:
check_keys(ctx.obj["root_path"])
-@keys_cmd.command("delete_all", short_help="Delete all private keys in keychain")
+@keys_cmd.command("delete_all", help="Delete all private keys in keychain")
def delete_all_cmd() -> None:
from chia.util.keychain import Keychain
Keychain().delete_all_keys()
-@keys_cmd.command("generate_and_print", short_help="Generates but does NOT add to keychain")
+@keys_cmd.command("generate_and_print", help="Generates but does NOT add to keychain")
def generate_and_print_cmd() -> None:
from .keys_funcs import generate_and_print
generate_and_print()
-@keys_cmd.command("sign", short_help="Sign a message with a private key")
+@keys_cmd.command("sign", help="Sign a message with a private key")
@click.option("--message", "-d", default=None, help="Enter the message to sign in UTF-8", type=str, required=True)
@click.option(
"--fingerprint",
@@ -252,7 +252,7 @@ def parse_signature_json(json_str: str) -> Tuple[str, str, str, str]:
return data["message"], data["pubkey"], data["signature"], data["signing_mode"]
-@keys_cmd.command("verify", short_help="Verify a signature with a pk")
+@keys_cmd.command("verify", help="Verify a signature with a pk")
@click.option("--message", "-d", default=None, help="Enter the signed message in UTF-8", type=str)
@click.option("--public_key", "-p", default=None, help="Enter the pk in hex", type=str)
@click.option("--signature", "-s", default=None, help="Enter the signature in hex", type=str)
@@ -282,7 +282,7 @@ def verify_cmd(message: str, public_key: str, signature: str, as_bytes: bool, js
verify(message, public_key, signature, as_bytes)
-@keys_cmd.group("derive", short_help="Derive child keys or wallet addresses")
+@keys_cmd.group("derive", help="Derive child keys or wallet addresses")
@click.option(
"--fingerprint",
"-f",
@@ -305,7 +305,7 @@ def derive_cmd(ctx: click.Context, fingerprint: Optional[int], filename: Optiona
ctx.obj["filename"] = filename
-@derive_cmd.command("search", short_help="Search the keyring for one or more matching derived keys or wallet addresses")
+@derive_cmd.command("search", help="Search the keyring for one or more matching derived keys or wallet addresses")
@click.argument("search-terms", type=str, nargs=-1)
@click.option(
"--limit", "-l", default=100, show_default=True, help="Limit the number of derivations to search against", type=int
@@ -383,7 +383,7 @@ def search_cmd(
sys.exit(0 if found else 1)
-@derive_cmd.command("wallet-address", short_help="Derive wallet receive addresses")
+@derive_cmd.command("wallet-address", help="Derive wallet receive addresses")
@click.option(
"--index", "-i", help="Index of the first wallet address to derive. Index 0 is the first wallet address.", default=0
)
@@ -420,7 +420,7 @@ def wallet_address_cmd(
)
-@derive_cmd.command("child-key", short_help="Derive child keys")
+@derive_cmd.command("child-key", help="Derive child keys")
@click.option(
"--type",
"-t",
diff --git a/chia/cmds/netspace.py b/chia/cmds/netspace.py
--- a/chia/cmds/netspace.py
+++ b/chia/cmds/netspace.py
@@ -5,7 +5,7 @@
import click
-@click.command("netspace", short_help="Estimate total farmed space on the network")
+@click.command("netspace", help="Estimate total farmed space on the network")
@click.option(
"-p",
"--rpc-port",
diff --git a/chia/cmds/netspace_funcs.py b/chia/cmds/netspace_funcs.py
--- a/chia/cmds/netspace_funcs.py
+++ b/chia/cmds/netspace_funcs.py
@@ -4,7 +4,7 @@
from chia.cmds.cmds_util import get_any_service_client
from chia.rpc.full_node_rpc_client import FullNodeRpcClient
-from chia.util.byte_types import hexstr_to_bytes
+from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.misc import format_bytes
@@ -12,8 +12,7 @@ async def netstorge_async(rpc_port: Optional[int], delta_block_height: str, star
"""
Calculates the estimated space on the network given two block header hashes.
"""
- async with get_any_service_client(FullNodeRpcClient, rpc_port) as node_config_fp:
- client, _, _ = node_config_fp
+ async with get_any_service_client(FullNodeRpcClient, rpc_port) as (client, _):
if client is not None:
if delta_block_height:
if start == "":
@@ -24,7 +23,7 @@ async def netstorge_async(rpc_port: Optional[int], delta_block_height: str, star
newer_block_height = blockchain_state["peak"].height
else:
- newer_block = await client.get_block_record(hexstr_to_bytes(start))
+ newer_block = await client.get_block_record(bytes32.from_hexstr(start))
if newer_block is None:
print("Block header hash", start, "not found.")
return None
@@ -39,7 +38,6 @@ async def netstorge_async(rpc_port: Optional[int], delta_block_height: str, star
network_space_bytes_estimate = await client.get_network_space(
newer_block_header.header_hash, older_block_header.header_hash
)
- assert network_space_bytes_estimate is not None
print(
"Older Block\n"
f"Block Height: {older_block_header.height}\n"
diff --git a/chia/cmds/passphrase.py b/chia/cmds/passphrase.py
--- a/chia/cmds/passphrase.py
+++ b/chia/cmds/passphrase.py
@@ -10,8 +10,8 @@
from chia.util.config import load_config
-@click.group("passphrase", short_help="Manage your keyring passphrase")
-def passphrase_cmd():
+@click.group("passphrase", help="Manage your keyring passphrase")
+def passphrase_cmd() -> None:
pass
@@ -103,28 +103,28 @@ def remove_cmd(ctx: click.Context, current_passphrase_file: Optional[TextIOWrapp
sys.exit(asyncio.run(async_update_daemon_passphrase_cache_if_running(root_path, config)))
-@passphrase_cmd.group("hint", short_help="Manage the optional keyring passphrase hint")
+@passphrase_cmd.group("hint", help="Manage the optional keyring passphrase hint")
def hint_cmd() -> None:
pass
-@hint_cmd.command("display", short_help="Display the keyring passphrase hint")
-def display_hint():
+@hint_cmd.command("display", help="Display the keyring passphrase hint")
+def display_hint() -> None:
from .passphrase_funcs import display_passphrase_hint
display_passphrase_hint()
-@hint_cmd.command("set", short_help="Set or update the keyring passphrase hint")
+@hint_cmd.command("set", help="Set or update the keyring passphrase hint")
@click.argument("hint", nargs=1)
-def set_hint(hint):
+def set_hint(hint: str) -> None:
from .passphrase_funcs import set_passphrase_hint
set_passphrase_hint(hint)
-@hint_cmd.command("remove", short_help="Remove the keyring passphrase hint")
-def remove_hint():
+@hint_cmd.command("remove", help="Remove the keyring passphrase hint")
+def remove_hint() -> None:
from .passphrase_funcs import remove_passphrase_hint
remove_passphrase_hint()
diff --git a/chia/cmds/peer.py b/chia/cmds/peer.py
--- a/chia/cmds/peer.py
+++ b/chia/cmds/peer.py
@@ -8,7 +8,7 @@
from chia.cmds.peer_funcs import peer_async
-@click.command("peer", short_help="Show, or modify peering connections", no_args_is_help=True)
+@click.command("peer", help="Show, or modify peering connections", no_args_is_help=True)
@click.option(
"-p",
"--rpc-port",
diff --git a/chia/cmds/peer_funcs.py b/chia/cmds/peer_funcs.py
--- a/chia/cmds/peer_funcs.py
+++ b/chia/cmds/peer_funcs.py
@@ -1,128 +1,127 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any, Dict, Optional
-
-from chia.cmds.cmds_util import NODE_TYPES, get_any_service_client
-from chia.rpc.rpc_client import RpcClient
-
-
-async def add_node_connection(rpc_client: RpcClient, add_connection: str) -> None:
- if ":" not in add_connection:
- print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
- else:
- ip, port = (
- ":".join(add_connection.split(":")[:-1]),
- add_connection.split(":")[-1],
- )
- print(f"Connecting to {ip}, {port}")
- try:
- result = await rpc_client.open_connection(ip, int(port))
- err = result.get("error")
- if result["success"] is False or err is not None:
- print(err)
- except Exception:
- print(f"Failed to connect to {ip}:{port}")
-
-
-async def remove_node_connection(rpc_client: RpcClient, remove_connection: str) -> None:
- from chia.server.outbound_message import NodeType
-
- result_txt = ""
- if len(remove_connection) != 8:
- result_txt = "Invalid NodeID. Do not include '.'"
- else:
- connections = await rpc_client.get_connections()
- for con in connections:
- if remove_connection == con["node_id"].hex()[:8]:
- print("Attempting to disconnect", "NodeID", remove_connection)
- try:
- await rpc_client.close_connection(con["node_id"])
- except Exception:
- result_txt = f"Failed to disconnect NodeID {remove_connection}"
- else:
- result_txt = (
- f"NodeID {remove_connection}... {NodeType(con['type']).name} {con['peer_host']} disconnected"
- )
- elif result_txt == "":
- result_txt = f"NodeID {remove_connection}... not found"
- print(result_txt)
-
-
-async def print_connections(rpc_client: RpcClient, trusted_peers: Dict[str, Any]) -> None:
- import time
-
- from chia.server.outbound_message import NodeType
- from chia.util.network import is_trusted_inner
-
- connections = await rpc_client.get_connections()
- print("Connections:")
- print("Type IP Ports NodeID Last Connect" + " MiB Up|Dwn")
- for con in connections:
- last_connect_tuple = time.struct_time(time.localtime(con["last_message_time"]))
- last_connect = time.strftime("%b %d %T", last_connect_tuple)
- mb_down = con["bytes_read"] / (1024 * 1024)
- mb_up = con["bytes_written"] / (1024 * 1024)
-
- host = con["peer_host"]
- # Strip IPv6 brackets
- host = host.strip("[]")
-
- trusted: bool = is_trusted_inner(host, con["node_id"], trusted_peers, False)
- # Nodetype length is 9 because INTRODUCER will be deprecated
- if NodeType(con["type"]) is NodeType.FULL_NODE:
- peak_height = con.get("peak_height", None)
- connection_peak_hash = con.get("peak_hash", None)
- if connection_peak_hash is None:
- connection_peak_hash = "No Info"
- else:
- if connection_peak_hash.startswith(("0x", "0X")):
- connection_peak_hash = connection_peak_hash[2:]
- connection_peak_hash = f"{connection_peak_hash[:8]}..."
- con_str = (
- f"{NodeType(con['type']).name:9} {host:39} "
- f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
- f" {con['node_id'].hex()[:8]}... "
- f"{last_connect} "
- f"{mb_up:7.1f}|{mb_down:<7.1f}"
- f"\n "
- )
- if peak_height is not None:
- con_str += f"-Height: {peak_height:8.0f} -Hash: {connection_peak_hash}"
- else:
- con_str += f"-Height: No Info -Hash: {connection_peak_hash}"
- # Only show when Trusted is True
- if trusted:
- con_str += f" -Trusted: {trusted}"
- else:
- con_str = (
- f"{NodeType(con['type']).name:9} {host:39} "
- f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
- f" {con['node_id'].hex()[:8]}... "
- f"{last_connect} "
- f"{mb_up:7.1f}|{mb_down:<7.1f}"
- )
- print(con_str)
-
-
-async def peer_async(
- node_type: str,
- rpc_port: Optional[int],
- root_path: Path,
- show_connections: bool,
- add_connection: str,
- remove_connection: str,
-) -> None:
- client_type = NODE_TYPES[node_type]
- async with get_any_service_client(client_type, rpc_port, root_path) as node_config_fp:
- rpc_client, config, _ = node_config_fp
- if rpc_client is not None:
- # Check or edit node connections
- if show_connections:
- trusted_peers: Dict[str, Any] = config["full_node"].get("trusted_peers", {})
- await print_connections(rpc_client, trusted_peers)
- # if called together with state, leave a blank line
- if add_connection:
- await add_node_connection(rpc_client, add_connection)
- if remove_connection:
- await remove_node_connection(rpc_client, remove_connection)
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+from chia.cmds.cmds_util import NODE_TYPES, get_any_service_client
+from chia.rpc.rpc_client import RpcClient
+
+
+async def add_node_connection(rpc_client: RpcClient, add_connection: str) -> None:
+ if ":" not in add_connection:
+ print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
+ else:
+ ip, port = (
+ ":".join(add_connection.split(":")[:-1]),
+ add_connection.split(":")[-1],
+ )
+ print(f"Connecting to {ip}, {port}")
+ try:
+ result = await rpc_client.open_connection(ip, int(port))
+ err = result.get("error")
+ if result["success"] is False or err is not None:
+ print(err)
+ except Exception:
+ print(f"Failed to connect to {ip}:{port}")
+
+
+async def remove_node_connection(rpc_client: RpcClient, remove_connection: str) -> None:
+ from chia.server.outbound_message import NodeType
+
+ result_txt = ""
+ if len(remove_connection) != 8:
+ result_txt = "Invalid NodeID. Do not include '.'"
+ else:
+ connections = await rpc_client.get_connections()
+ for con in connections:
+ if remove_connection == con["node_id"].hex()[:8]:
+ print("Attempting to disconnect", "NodeID", remove_connection)
+ try:
+ await rpc_client.close_connection(con["node_id"])
+ except Exception:
+ result_txt = f"Failed to disconnect NodeID {remove_connection}"
+ else:
+ result_txt = (
+ f"NodeID {remove_connection}... {NodeType(con['type']).name} {con['peer_host']} disconnected"
+ )
+ elif result_txt == "":
+ result_txt = f"NodeID {remove_connection}... not found"
+ print(result_txt)
+
+
+async def print_connections(rpc_client: RpcClient, trusted_peers: Dict[str, Any]) -> None:
+ import time
+
+ from chia.server.outbound_message import NodeType
+ from chia.util.network import is_trusted_peer
+
+ connections = await rpc_client.get_connections()
+ print("Connections:")
+ print("Type IP Ports NodeID Last Connect" + " MiB Up|Dwn")
+ for con in connections:
+ last_connect_tuple = time.struct_time(time.localtime(con["last_message_time"]))
+ last_connect = time.strftime("%b %d %T", last_connect_tuple)
+ mb_down = con["bytes_read"] / (1024 * 1024)
+ mb_up = con["bytes_written"] / (1024 * 1024)
+
+ host = con["peer_host"]
+ # Strip IPv6 brackets
+ host = host.strip("[]")
+
+ trusted: bool = is_trusted_peer(host, con["node_id"], trusted_peers, False)
+ # Nodetype length is 9 because INTRODUCER will be deprecated
+ if NodeType(con["type"]) is NodeType.FULL_NODE:
+ peak_height = con.get("peak_height", None)
+ connection_peak_hash = con.get("peak_hash", None)
+ if connection_peak_hash is None:
+ connection_peak_hash = "No Info"
+ else:
+ if connection_peak_hash.startswith(("0x", "0X")):
+ connection_peak_hash = connection_peak_hash[2:]
+ connection_peak_hash = f"{connection_peak_hash[:8]}..."
+ con_str = (
+ f"{NodeType(con['type']).name:9} {host:39} "
+ f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
+ f" {con['node_id'].hex()[:8]}... "
+ f"{last_connect} "
+ f"{mb_up:7.1f}|{mb_down:<7.1f}"
+ f"\n "
+ )
+ if peak_height is not None:
+ con_str += f"-Height: {peak_height:8.0f} -Hash: {connection_peak_hash}"
+ else:
+ con_str += f"-Height: No Info -Hash: {connection_peak_hash}"
+ # Only show when Trusted is True
+ if trusted:
+ con_str += f" -Trusted: {trusted}"
+ else:
+ con_str = (
+ f"{NodeType(con['type']).name:9} {host:39} "
+ f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
+ f" {con['node_id'].hex()[:8]}... "
+ f"{last_connect} "
+ f"{mb_up:7.1f}|{mb_down:<7.1f}"
+ )
+ print(con_str)
+
+
+async def peer_async(
+ node_type: str,
+ rpc_port: Optional[int],
+ root_path: Path,
+ show_connections: bool,
+ add_connection: str,
+ remove_connection: str,
+) -> None:
+ client_type = NODE_TYPES[node_type]
+ async with get_any_service_client(client_type, rpc_port, root_path) as (rpc_client, config):
+ if rpc_client is not None:
+ # Check or edit node connections
+ if show_connections:
+ trusted_peers: Dict[str, Any] = config["full_node"].get("trusted_peers", {})
+ await print_connections(rpc_client, trusted_peers)
+ # if called together with state, leave a blank line
+ if add_connection:
+ await add_node_connection(rpc_client, add_connection)
+ if remove_connection:
+ await remove_node_connection(rpc_client, remove_connection)
diff --git a/chia/cmds/plotnft.py b/chia/cmds/plotnft.py
--- a/chia/cmds/plotnft.py
+++ b/chia/cmds/plotnft.py
@@ -20,12 +20,12 @@ def validate_fee(ctx: click.Context, param: click.Parameter, value: str) -> str:
return value
-@click.group("plotnft", short_help="Manage your plot NFTs")
+@click.group("plotnft", help="Manage your plot NFTs")
def plotnft_cmd() -> None:
pass
-@plotnft_cmd.command("show", short_help="Show plotnft information")
+@plotnft_cmd.command("show", help="Show plotnft information")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -34,7 +34,7 @@ def plotnft_cmd() -> None:
default=None,
)
@click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=False)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None:
import asyncio
@@ -43,9 +43,7 @@ def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None:
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, {"id": id}, show))
-@plotnft_cmd.command(
- "get_login_link", short_help="Create a login link for a pool. To get the launcher id, use plotnft show."
-)
+@plotnft_cmd.command("get_login_link", help="Create a login link for a pool. To get the launcher id, use plotnft show.")
@click.option("-l", "--launcher_id", help="Launcher ID of the plotnft", type=str, required=True)
def get_login_link_cmd(launcher_id: str) -> None:
import asyncio
@@ -55,9 +53,9 @@ def get_login_link_cmd(launcher_id: str) -> None:
asyncio.run(get_login_link(launcher_id))
-@plotnft_cmd.command("create", short_help="Create a plot NFT")
+@plotnft_cmd.command("create", help="Create a plot NFT")
@click.option("-y", "--yes", help="No prompts", is_flag=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-u", "--pool_url", help="HTTPS host:port of the pool to join", type=str, required=False)
@click.option("-s", "--state", help="Initial state of Plot NFT: local or pool", type=str, required=True)
@click.option(
@@ -105,10 +103,10 @@ def create_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, create))
-@plotnft_cmd.command("join", short_help="Join a plot NFT to a Pool")
+@plotnft_cmd.command("join", help="Join a plot NFT to a Pool")
@click.option("-y", "--yes", help="No prompts", is_flag=True)
@click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-u", "--pool_url", help="HTTPS host:port of the pool to join", type=str, required=True)
@click.option(
"-m",
@@ -136,10 +134,10 @@ def join_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, join_pool))
-@plotnft_cmd.command("leave", short_help="Leave a pool and return to self-farming")
+@plotnft_cmd.command("leave", help="Leave a pool and return to self-farming")
@click.option("-y", "--yes", help="No prompts", is_flag=True)
@click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-m",
"--fee",
@@ -166,9 +164,9 @@ def self_pool_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, self_pool))
-@plotnft_cmd.command("inspect", short_help="Get Detailed plotnft information as JSON")
+@plotnft_cmd.command("inspect", help="Get Detailed plotnft information as JSON")
@click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -185,9 +183,9 @@ def inspect(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None:
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, inspect_cmd))
-@plotnft_cmd.command("claim", short_help="Claim rewards from a plot NFT")
+@plotnft_cmd.command("claim", help="Claim rewards from a plot NFT")
@click.option("-i", "--id", help="ID of the wallet to use", type=int, default=None, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-m",
"--fee",
@@ -216,7 +214,7 @@ def claim(wallet_rpc_port: Optional[int], fingerprint: int, id: int, fee: int) -
@plotnft_cmd.command(
"change_payout_instructions",
- short_help="Change the payout instructions for a pool. To get the launcher id, use plotnft show.",
+ help="Change the payout instructions for a pool. To get the launcher id, use plotnft show.",
)
@click.option("-l", "--launcher_id", help="Launcher ID of the plotnft", type=str, required=True)
@click.option("-a", "--address", help="New address for payout instructions", type=str, required=True)
diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py
--- a/chia/cmds/plotnft_funcs.py
+++ b/chia/cmds/plotnft_funcs.py
@@ -173,8 +173,7 @@ async def pprint_pool_wallet_state(
async def show(args: Dict[str, Any], wallet_client: WalletRpcClient, fingerprint: int) -> None:
- async with get_any_service_client(FarmerRpcClient) as node_config_fp:
- farmer_client, config, _ = node_config_fp
+ async with get_any_service_client(FarmerRpcClient) as (farmer_client, config):
if farmer_client is not None:
address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
summaries_response = await wallet_client.get_wallets()
@@ -222,8 +221,7 @@ async def show(args: Dict[str, Any], wallet_client: WalletRpcClient, fingerprint
async def get_login_link(launcher_id_str: str) -> None:
launcher_id: bytes32 = bytes32.from_hexstr(launcher_id_str)
- async with get_any_service_client(FarmerRpcClient) as node_config_fp:
- farmer_client, _, _ = node_config_fp
+ async with get_any_service_client(FarmerRpcClient) as (farmer_client, _):
if farmer_client is not None:
login_link: Optional[str] = await farmer_client.get_pool_login_link(launcher_id)
if login_link is None:
diff --git a/chia/cmds/plots.py b/chia/cmds/plots.py
--- a/chia/cmds/plots.py
+++ b/chia/cmds/plots.py
@@ -27,7 +27,7 @@ def show_plots(root_path: Path):
print(f"{str_path}")
-@click.group("plots", short_help="Manage your plots")
+@click.group("plots", help="Manage your plots")
@click.pass_context
def plots_cmd(ctx: click.Context):
"""Create, add, remove and check your plots"""
@@ -39,7 +39,7 @@ def plots_cmd(ctx: click.Context):
initialize_logging("", {"log_level": "INFO", "log_stdout": True}, root_path)
-@plots_cmd.command("create", short_help="Create plots")
+@plots_cmd.command("create", help="Create plots")
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@@ -159,7 +159,7 @@ def create_cmd(
print(e)
-@plots_cmd.command("check", short_help="Checks plots")
+@plots_cmd.command("check", help="Checks plots")
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=None)
@click.option(
"-g",
@@ -180,7 +180,7 @@ def check_cmd(
check_plots(ctx.obj["root_path"], num, challenge_start, grep_string, list_duplicates, debug_show_memo)
-@plots_cmd.command("add", short_help="Adds a directory of plots")
+@plots_cmd.command("add", help="Adds a directory of plots")
@click.option(
"-d",
"--final_dir",
@@ -200,7 +200,7 @@ def add_cmd(ctx: click.Context, final_dir: str):
print(e)
-@plots_cmd.command("remove", short_help="Removes a directory of plots from config.yaml")
+@plots_cmd.command("remove", help="Removes a directory of plots from config.yaml")
@click.option(
"-d",
"--final_dir",
@@ -216,7 +216,7 @@ def remove_cmd(ctx: click.Context, final_dir: str):
remove_plot_directory(ctx.obj["root_path"], final_dir)
-@plots_cmd.command("show", short_help="Shows the directory of current plots")
+@plots_cmd.command("show", help="Shows the directory of current plots")
@click.pass_context
def show_cmd(ctx: click.Context):
show_plots(ctx.obj["root_path"])
diff --git a/chia/cmds/plotters.py b/chia/cmds/plotters.py
--- a/chia/cmds/plotters.py
+++ b/chia/cmds/plotters.py
@@ -7,7 +7,7 @@
@click.command(
"plotters",
- short_help="Advanced plotting options",
+ help="Advanced plotting options",
context_settings={"ignore_unknown_options": True},
add_help_option=False,
)
diff --git a/chia/cmds/rpc.py b/chia/cmds/rpc.py
--- a/chia/cmds/rpc.py
+++ b/chia/cmds/rpc.py
@@ -52,7 +52,7 @@ def get_routes(service: str, config: Dict[str, Any]) -> Dict[str, Any]:
return asyncio.run(call_endpoint(service, "get_routes", {}, config))
-@click.group("rpc", short_help="RPC Client")
+@click.group("rpc", help="RPC Client")
def rpc_cmd() -> None:
pass
diff --git a/chia/cmds/show.py b/chia/cmds/show.py
--- a/chia/cmds/show.py
+++ b/chia/cmds/show.py
@@ -7,7 +7,7 @@
from chia.cmds.show_funcs import show_async
-@click.command("show", short_help="Show node information", no_args_is_help=True)
+@click.command("show", help="Show node information", no_args_is_help=True)
@click.option(
"-p",
"--rpc-port",
@@ -34,9 +34,7 @@
@click.option(
"-r", "--remove-connection", help="Remove a Node by the first 8 characters of NodeID", type=str, default=""
)
-@click.option(
- "-bh", "--block-header-hash-by-height", help="Look up a block header hash by block height", type=str, default=""
-)
+@click.option("-bh", "--block-header-hash-by-height", help="Look up a block header hash by block height", type=int)
@click.option("-b", "--block-by-header-hash", help="Look up a block by block header hash", type=str, default="")
@click.pass_context
def show_cmd(
@@ -48,17 +46,17 @@ def show_cmd(
connections: bool,
add_connection: str,
remove_connection: str,
- block_header_hash_by_height: str,
+ block_header_hash_by_height: Optional[int],
block_by_header_hash: str,
) -> None:
import asyncio
if connections:
- print("'chia show -c' has been renamed to 'chia peer -c' ")
+ print("'chia show -c' has been renamed to 'chia peer full_node -c' ")
if add_connection != "":
- print("'chia show -a' has been renamed to 'chia peer -a' ")
+ print("'chia show -a' has been renamed to 'chia peer full_node -a' ")
if remove_connection != "":
- print("'chia show -r' has been renamed to 'chia peer -r' ")
+ print("'chia show -r' has been renamed to 'chia peer full_node -r' ")
if wallet_rpc_port is not None:
print("'chia show -wp' is not used, please remove it from your command.")
asyncio.run(
diff --git a/chia/cmds/show_funcs.py b/chia/cmds/show_funcs.py
--- a/chia/cmds/show_funcs.py
+++ b/chia/cmds/show_funcs.py
@@ -99,10 +99,9 @@ async def print_block_from_hash(
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.full_block import FullBlock
from chia.util.bech32m import encode_puzzle_hash
- from chia.util.byte_types import hexstr_to_bytes
- block: Optional[BlockRecord] = await node_client.get_block_record(hexstr_to_bytes(block_by_header_hash))
- full_block: Optional[FullBlock] = await node_client.get_block(hexstr_to_bytes(block_by_header_hash))
+ block: Optional[BlockRecord] = await node_client.get_block_record(bytes32.from_hexstr(block_by_header_hash))
+ full_block: Optional[FullBlock] = await node_client.get_block(bytes32.from_hexstr(block_by_header_hash))
# Would like to have a verbose flag for this
if block is not None:
assert full_block is not None
@@ -191,13 +190,12 @@ async def show_async(
root_path: Path,
print_fee_info_flag: bool,
print_state: bool,
- block_header_hash_by_height: str,
+ block_header_hash_by_height: Optional[int],
block_by_header_hash: str,
) -> None:
from chia.cmds.cmds_util import get_any_service_client
- async with get_any_service_client(FullNodeRpcClient, rpc_port, root_path) as node_config_fp:
- node_client, config, _ = node_config_fp
+ async with get_any_service_client(FullNodeRpcClient, rpc_port, root_path) as (node_client, config):
if node_client is not None:
# Check State
if print_state:
@@ -206,7 +204,7 @@ async def show_async(
if print_fee_info_flag:
await print_fee_info(node_client)
# Get Block Information
- if block_header_hash_by_height != "":
+ if block_header_hash_by_height is not None:
block_header = await node_client.get_block_record_by_height(block_header_hash_by_height)
if block_header is not None:
print(f"Header hash of block {block_header_hash_by_height}: {block_header.header_hash.hex()}")
diff --git a/chia/cmds/sim.py b/chia/cmds/sim.py
new file mode 100644
--- /dev/null
+++ b/chia/cmds/sim.py
@@ -0,0 +1,216 @@
+from __future__ import annotations
+
+import asyncio
+from pathlib import Path
+from typing import Any, Optional
+
+import click
+
+from chia.cmds.sim_funcs import async_config_wizard, farm_blocks, print_status, revert_block_height, set_auto_farm
+from chia.util.default_root import SIMULATOR_ROOT_PATH
+
+
+@click.group("sim", help="Configure and make requests to a Chia Simulator Full Node")
+@click.option(
+ "-p",
+ "--rpc-port",
+ help=(
+ "Set the port where the Simulator is hosting the RPC interface. "
+ "See the rpc_port under full_node in config.yaml"
+ ),
+ type=int,
+ default=None,
+)
+@click.option(
+ "--root-path", default=SIMULATOR_ROOT_PATH, help="Simulator root folder.", type=click.Path(), show_default=True
+)
+@click.option(
+ "-n",
+ "--simulator-name",
+ help="This name is used to determine the sub folder to use in the simulator root folder.",
+ type=str,
+ default="main",
+)
+@click.pass_context
+def sim_cmd(ctx: click.Context, rpc_port: Optional[int], root_path: str, simulator_name: str) -> None:
+ ctx.ensure_object(dict)
+ ctx.obj["root_path"] = Path(root_path) / simulator_name
+ ctx.obj["sim_name"] = simulator_name
+ ctx.obj["rpc_port"] = rpc_port
+
+
+@sim_cmd.command("create", help="Guides you through the process of setting up a Chia Simulator")
+@click.option("-f", "--fingerprint", type=int, required=False, help="Use your fingerprint to skip the key prompt")
+@click.option(
+ "-r",
+ "--reward-address",
+ type=str,
+ required=False,
+ help="Use this address instead of the default farming address.",
+)
+@click.option(
+ "-p", "--plot-directory", type=str, required=False, help="Use a different directory then 'simulator/plots'."
+)
+@click.option("-m", "--mnemonic", type=str, required=False, help="Add to keychain and use a specific mnemonic.")
+@click.option("-a", "--auto-farm", type=bool, default=None, help="Enable or Disable auto farming")
+@click.option(
+ "-d",
+ "--docker-mode",
+ is_flag=True,
+ hidden=True,
+ help="Run non-interactively in Docker Mode, & generate a new key if keychain is empty.",
+)
+@click.option("-b", "--no-bitfield", type=bool, is_flag=True, help="Do not use bitfield when generating plots")
+@click.pass_context
+def create_simulator_config(
+ ctx: click.Context,
+ fingerprint: Optional[int],
+ reward_address: Optional[str],
+ plot_directory: Optional[str],
+ mnemonic: Optional[str],
+ auto_farm: Optional[bool],
+ docker_mode: bool,
+ no_bitfield: bool,
+) -> None:
+ print(f"Using this Directory: {ctx.obj['root_path']}\n")
+ if fingerprint and mnemonic:
+ print("You can't use both a fingerprint and a mnemonic. Please choose one.")
+ return None
+ asyncio.run(
+ async_config_wizard(
+ ctx.obj["root_path"],
+ fingerprint,
+ reward_address,
+ plot_directory,
+ mnemonic,
+ auto_farm,
+ docker_mode,
+ not no_bitfield,
+ )
+ )
+
+
+@sim_cmd.command("start", help="Start service groups while automatically using the right chia_root.")
+@click.option("-r", "--restart", is_flag=True, help="Restart running services")
+@click.option("-w", "--wallet", is_flag=True, help="Start wallet")
+@click.pass_context
+def sim_start_cmd(ctx: click.Context, restart: bool, wallet: bool) -> None:
+ from chia.cmds.start import start_cmd
+
+ group: tuple[str, ...] = ("simulator",)
+ if wallet:
+ group += ("wallet",)
+ ctx.invoke(start_cmd, restart=restart, group=group)
+
+
+@sim_cmd.command("stop", help="Stop running services while automatically using the right chia_root.")
+@click.option("-d", "--daemon", is_flag=True, help="Stop daemon")
+@click.option("-w", "--wallet", is_flag=True, help="Stop wallet")
+@click.pass_context
+def sim_stop_cmd(ctx: click.Context, daemon: bool, wallet: bool) -> None:
+ from chia.cmds.stop import stop_cmd
+
+ group: Any = ("simulator",)
+ if wallet:
+ group += ("wallet",)
+ ctx.invoke(stop_cmd, daemon=daemon, group=group)
+
+
+@sim_cmd.command("status", help="Get information about the state of the simulator.")
+@click.option("-f", "--fingerprint", type=int, help="Get detailed information on this fingerprint.")
+@click.option("--show-key/--no-show-key", help="Show detailed key information.")
+@click.option("-c", "--show-coins", is_flag=True, help="Show all unspent coins.")
+@click.option("-i", "--include-rewards", is_flag=True, help="Include reward coins when showing coins.")
+@click.option("-a", "--show-addresses", is_flag=True, help="Show the balances of all addresses.")
+@click.pass_context
+def status_cmd(
+ ctx: click.Context,
+ fingerprint: Optional[int],
+ show_key: bool,
+ show_coins: bool,
+ include_rewards: bool,
+ show_addresses: bool,
+) -> None:
+ asyncio.run(
+ print_status(
+ ctx.obj["rpc_port"],
+ ctx.obj["root_path"],
+ fingerprint,
+ show_key,
+ show_coins,
+ include_rewards,
+ show_addresses,
+ )
+ )
+
+
+@sim_cmd.command("revert", help="Reset chain to a previous block height.")
+@click.option("-b", "--blocks", type=int, default=1, help="Number of blocks to go back.")
+@click.option("-n", "--new-blocks", type=int, default=1, help="Number of new blocks to add during a reorg.")
+@click.option("-r", "--reset", is_flag=True, help="Reset the chain to the genesis block")
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ help="Forcefully delete blocks, this is not a reorg but might be needed in very special circumstances."
+ " Note: Use with caution, this will break all wallets.",
+)
+@click.option("-d", "--disable-prompt", is_flag=True, help="Disable confirmation prompt when force reverting.")
+@click.pass_context
+def revert_cmd(
+ ctx: click.Context, blocks: int, new_blocks: int, reset: bool, force: bool, disable_prompt: bool
+) -> None:
+ if force and not disable_prompt:
+ input_str = (
+ "Are you sure you want to force delete blocks? This should only ever be used in special circumstances,"
+ " and will break all wallets. \nPress 'y' to continue, or any other button to exit: "
+ )
+ if input(input_str) != "y":
+ return
+ if reset and not force:
+ print("\n The force flag (-f) is required to reset the chain to the genesis block. \n")
+ return
+ if reset and blocks != 1:
+ print("\nBlocks, '-b' must not be set if all blocks are selected by reset, '-r'. Exiting.\n")
+ return
+ asyncio.run(
+ revert_block_height(
+ ctx.obj["rpc_port"],
+ ctx.obj["root_path"],
+ blocks,
+ new_blocks,
+ reset,
+ force,
+ )
+ )
+
+
+@sim_cmd.command("farm", help="Farm blocks")
+@click.option("-b", "--blocks", type=int, default=1, help="Amount of blocks to create")
+@click.option("-n", "--non-transaction", is_flag=True, help="Allow non-transaction blocks")
+@click.option("-a", "--target-address", type=str, default="", help="Block reward address")
+@click.pass_context
+def farm_cmd(ctx: click.Context, blocks: int, non_transaction: bool, target_address: str) -> None:
+ asyncio.run(
+ farm_blocks(
+ ctx.obj["rpc_port"],
+ ctx.obj["root_path"],
+ blocks,
+ not non_transaction,
+ target_address,
+ )
+ )
+
+
+@sim_cmd.command("autofarm", help="Enable or disable auto farming on transaction submission")
+@click.argument("set-autofarm", type=click.Choice(["on", "off"]), nargs=1, required=True)
+@click.pass_context
+def autofarm_cmd(ctx: click.Context, set_autofarm: str) -> None:
+ autofarm = bool(set_autofarm == "on")
+ asyncio.run(
+ set_auto_farm(
+ ctx.obj["rpc_port"],
+ ctx.obj["root_path"],
+ autofarm,
+ )
+ )
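A quick way to exercise the new command group without starting any services is click's test runner. The snippet below is a hypothetical smoke test, not part of the patch; it only checks that the group and its help text are wired up:

    from click.testing import CliRunner

    from chia.cmds.sim import sim_cmd


    def test_sim_help_renders() -> None:
        # Invokes the click group in-process; no simulator services are started.
        runner = CliRunner()
        result = runner.invoke(sim_cmd, ["--help"])
        assert result.exit_code == 0
        assert "Configure and make requests to a Chia Simulator Full Node" in result.output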
diff --git a/chia/cmds/sim_funcs.py b/chia/cmds/sim_funcs.py
new file mode 100644
--- /dev/null
+++ b/chia/cmds/sim_funcs.py
@@ -0,0 +1,498 @@
+from __future__ import annotations
+
+import asyncio
+import os
+import sys
+from pathlib import Path, PureWindowsPath
+from random import randint
+from typing import Any, Dict, List, Optional
+
+from aiohttp import ClientConnectorError
+from blspy import PrivateKey
+
+from chia.cmds.cmds_util import get_any_service_client
+from chia.cmds.start_funcs import async_start
+from chia.consensus.coinbase import create_puzzlehash_for_pk
+from chia.simulator.simulator_full_node_rpc_client import SimulatorFullNodeRpcClient
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_record import CoinRecord
+from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
+from chia.util.config import load_config, save_config
+from chia.util.errors import KeychainFingerprintExists
+from chia.util.ints import uint32
+from chia.util.keychain import Keychain, bytes_to_mnemonic
+from chia.wallet.derive_keys import (
+ master_sk_to_farmer_sk,
+ master_sk_to_pool_sk,
+ master_sk_to_wallet_sk,
+ master_sk_to_wallet_sk_unhardened,
+)
+
+
+def get_ph_from_fingerprint(fingerprint: int, key_id: int = 1) -> bytes32:
+ priv_key_and_entropy = Keychain().get_private_key_by_fingerprint(fingerprint)
+ if priv_key_and_entropy is None:
+ raise Exception("Fingerprint not found")
+ private_key = priv_key_and_entropy[0]
+ sk_for_wallet_id: PrivateKey = master_sk_to_wallet_sk(private_key, uint32(key_id))
+ puzzle_hash: bytes32 = create_puzzlehash_for_pk(sk_for_wallet_id.get_g1())
+ return puzzle_hash
+
+
+def create_chia_directory(
+ chia_root: Path,
+ fingerprint: int,
+ farming_address: Optional[str],
+ plot_directory: Optional[str],
+ auto_farm: Optional[bool],
+ docker_mode: bool,
+) -> Dict[str, Any]:
+ """
+ This function creates a new chia directory and returns a heavily modified config,
+ suitable for use in the simulator.
+ """
+ from chia.cmds.init_funcs import chia_init
+
+ if not chia_root.is_dir() or not Path(chia_root / "config" / "config.yaml").exists():
+ # create chia directories & load config
+ chia_init(chia_root, testnet=True, fix_ssl_permissions=True)
+ config: Dict[str, Any] = load_config(chia_root, "config.yaml")
+ # apply standard block-tools config.
+ config["full_node"]["send_uncompact_interval"] = 0
+ config["full_node"]["target_uncompact_proofs"] = 30
+ config["full_node"]["peer_connect_interval"] = 50
+ config["full_node"]["sanitize_weight_proof_only"] = False
+ config["logging"]["log_level"] = "INFO" # extra logs for easier development
+ # make sure we don't try to connect to other nodes.
+ config["full_node"]["introducer_peer"] = None
+ config["wallet"]["introducer_peer"] = None
+ config["full_node"]["dns_servers"] = []
+ config["wallet"]["dns_servers"] = []
+ # create custom testnet (simulator0)
+ config["network_overrides"]["constants"]["simulator0"] = config["network_overrides"]["constants"][
+ "testnet0"
+ ].copy()
+ config["network_overrides"]["config"]["simulator0"] = config["network_overrides"]["config"]["testnet0"].copy()
+ sim_genesis = "eb8c4d20b322be8d9fddbf9412016bdffe9a2901d7edb0e364e94266d0e095f7"
+ config["network_overrides"]["constants"]["simulator0"]["GENESIS_CHALLENGE"] = sim_genesis
+ # tell services to use simulator0
+ config["selected_network"] = "simulator0"
+ config["wallet"]["selected_network"] = "simulator0"
+ config["full_node"]["selected_network"] = "simulator0"
+ if not docker_mode: # We want predictable ports for our docker image.
+ # set ports and networks, we don't want to cause a port conflict.
+ port_offset = randint(1, 20000)
+ config["daemon_port"] -= port_offset
+ config["network_overrides"]["config"]["simulator0"]["default_full_node_port"] = 38444 + port_offset
+ # wallet
+ config["wallet"]["port"] += port_offset
+ config["wallet"]["rpc_port"] += port_offset
+ # full node
+ config["full_node"]["port"] -= port_offset
+ config["full_node"]["rpc_port"] += port_offset
+ # connect wallet to full node
+ config["wallet"]["full_node_peer"]["port"] = config["full_node"]["port"]
+ config["full_node"]["wallet_peer"]["port"] = config["wallet"]["port"]
+ # ui
+ config["ui"]["daemon_port"] = config["daemon_port"]
+ else:
+ config["self_hostname"] = "0.0.0.0" # Bind to all interfaces.
+ config["logging"]["log_stdout"] = True # Log to console.
+ else:
+ config = load_config(chia_root, "config.yaml")
+ # simulator overrides
+ config["simulator"]["key_fingerprint"] = fingerprint
+ if farming_address is None:
+ prefix = config["network_overrides"]["config"]["simulator0"]["address_prefix"]
+ farming_address = encode_puzzle_hash(get_ph_from_fingerprint(fingerprint), prefix)
+ config["simulator"]["farming_address"] = farming_address
+ if plot_directory is not None:
+ config["simulator"]["plot_directory"] = plot_directory
+ # Temporary change to fix win / linux differences.
+ config["simulator"]["plot_directory"] = str(Path(config["simulator"]["plot_directory"]))
+ if "//" in config["simulator"]["plot_directory"] and os.name != "nt":
+ # if we're on linux, we need to convert to a linux path.
+ config["simulator"]["plot_directory"] = str(PureWindowsPath(config["simulator"]["plot_directory"]).as_posix())
+ config["simulator"]["auto_farm"] = auto_farm if auto_farm is not None else True
+ farming_ph = decode_puzzle_hash(farming_address)
+ # modify genesis block to give the user the reward
+ simulator_consts = config["network_overrides"]["constants"]["simulator0"]
+ simulator_consts["GENESIS_PRE_FARM_FARMER_PUZZLE_HASH"] = farming_ph.hex()
+ simulator_consts["GENESIS_PRE_FARM_POOL_PUZZLE_HASH"] = farming_ph.hex()
+ # save config and return the config
+ save_config(chia_root, "config.yaml", config)
+ return config
+
+
+def display_key_info(fingerprint: int, prefix: str) -> None:
+ """
+ Display key info for a given fingerprint, similar to the output of `chia keys show`.
+ """
+ print(f"Using fingerprint {fingerprint}")
+ private_key_and_seed = Keychain().get_private_key_by_fingerprint(fingerprint)
+ if private_key_and_seed is None:
+ print(f"Fingerprint {fingerprint} not found")
+ return
+ sk, seed = private_key_and_seed
+ print("\nFingerprint:", sk.get_g1().get_fingerprint())
+ print("Master public key (m):", sk.get_g1())
+ print("Farmer public key (m/12381/8444/0/0):", master_sk_to_farmer_sk(sk).get_g1())
+ print("Pool public key (m/12381/8444/1/0):", master_sk_to_pool_sk(sk).get_g1())
+ first_wallet_sk: PrivateKey = master_sk_to_wallet_sk_unhardened(sk, uint32(0))
+ wallet_address: str = encode_puzzle_hash(create_puzzlehash_for_pk(first_wallet_sk.get_g1()), prefix)
+ print(f"First wallet address: {wallet_address}")
+ assert seed is not None
+ print("Master private key (m):", bytes(sk).hex())
+ print("First wallet secret key (m/12381/8444/2/0):", master_sk_to_wallet_sk(sk, uint32(0)))
+ mnemonic = bytes_to_mnemonic(seed)
+ print(" Mnemonic seed (24 secret words):")
+ print(f"{mnemonic} \n")
+
+
+def generate_and_return_fingerprint(mnemonic: Optional[str] = None) -> int:
+ """
+ Generate and add new PrivateKey and return its fingerprint.
+ """
+ from chia.util.keychain import generate_mnemonic
+
+ if mnemonic is None:
+ print("Generating private key")
+ mnemonic = generate_mnemonic()
+ try:
+ sk = Keychain().add_private_key(mnemonic, None)
+ fingerprint: int = sk.get_g1().get_fingerprint()
+ except KeychainFingerprintExists as e:
+ fingerprint = e.fingerprint
+ print(f"Fingerprint: {fingerprint} for provided private key already exists.")
+ return fingerprint
+ print(f"Added private key with public key fingerprint {fingerprint}")
+ return fingerprint
+
+
+def select_fingerprint(
+ fingerprint: Optional[int] = None, mnemonic_string: Optional[str] = None, auto_generate_key: bool = False
+) -> Optional[int]:
+ """
+ Either select an existing fingerprint or create one and return it.
+ """
+ if mnemonic_string:
+ fingerprint = generate_and_return_fingerprint(mnemonic_string)
+ fingerprints: list[int] = [pk.get_fingerprint() for pk in Keychain().get_all_public_keys()]
+ if fingerprint is not None and fingerprint in fingerprints:
+ return fingerprint
+ elif fingerprint is not None and fingerprint not in fingerprints:
+ print(f"Invalid Fingerprint. Fingerprint {fingerprint} was not found.")
+ return None
+ if auto_generate_key and len(fingerprints) == 1:
+ return fingerprints[0]
+ if len(fingerprints) == 0:
+ if not auto_generate_key:
+ if (
+ input("No keys in keychain. Press 'q' to quit, or press any other key to generate a new key.").lower()
+ == "q"
+ ):
+ return None
+ # generate private key and add to wallet
+ fingerprint = generate_and_return_fingerprint()
+ else:
+ print("Fingerprints:")
+ print(
+ "If you already used one of these keys, select that fingerprint to skip the plotting process."
+ " Otherwise, select any key below."
+ )
+ for i, fp in enumerate(fingerprints):
+ row: str = f"{i + 1}) "
+ row += f"{fp}"
+ print(row)
+ val = None
+ prompt: str = f"Choose a simulator key [1-{len(fingerprints)}] ('q' to quit, or 'g' to generate a new key): "
+ while val is None:
+ val = input(prompt)
+ if val == "q":
+ return None
+ elif val == "g":
+ fingerprint = generate_and_return_fingerprint()
+ break
+ elif not val.isdigit():
+ val = None
+ else:
+ index = int(val) - 1
+ if index < 0 or index >= len(fingerprints):
+ print("Invalid value")
+ val = None
+ continue
+ else:
+ fingerprint = fingerprints[index]
+ assert fingerprint is not None
+ return fingerprint
+
+
+async def generate_plots(config: Dict[str, Any], root_path: Path, fingerprint: int, bitfield: bool) -> None:
+ """
+ Pre-Generate plots for the new simulator instance.
+ """
+
+ from chia.simulator.block_tools import BlockTools, test_constants
+ from chia.simulator.start_simulator import PLOT_SIZE, PLOTS
+
+ farming_puzzle_hash = decode_puzzle_hash(config["simulator"]["farming_address"])
+ os.environ["CHIA_ROOT"] = str(root_path) # change env variable, to make it match what the daemon would set it to
+
+ # create block tools and use local keychain
+ bt = BlockTools(
+ test_constants,
+ root_path,
+ automated_testing=False,
+ plot_dir=config["simulator"].get("plot_directory", "plots"),
+ keychain=Keychain(),
+ )
+ await bt.setup_keys(fingerprint=fingerprint, reward_ph=farming_puzzle_hash)
+ existing_plots = await bt.setup_plots(
+ num_og_plots=PLOTS, num_pool_plots=0, num_non_keychain_plots=0, plot_size=PLOT_SIZE, bitfield=bitfield
+ )
+ print(f"{'New plots generated.' if existing_plots else 'Using Existing Plots'}\n")
+
+
+async def get_current_height(root_path: Path) -> int:
+ async with get_any_service_client(SimulatorFullNodeRpcClient, root_path=root_path, consume_errors=False) as (
+ node_client,
+ _,
+ ):
+        assert node_client is not None  # this can't be None, because we don't catch errors
+ num_blocks = len(await node_client.get_all_blocks())
+ return num_blocks
+
+
+async def async_config_wizard(
+ root_path: Path,
+ fingerprint: Optional[int],
+ farming_address: Optional[str],
+ plot_directory: Optional[str],
+ mnemonic_string: Optional[str],
+ auto_farm: Optional[bool],
+ docker_mode: bool,
+ bitfield: bool,
+) -> None:
+ # either return passed through fingerprint or get one
+ fingerprint = select_fingerprint(fingerprint, mnemonic_string, docker_mode)
+ if fingerprint is None:
+ # user cancelled wizard
+ return
+ # create chia directory & get config.
+ print("Creating chia directory & config...")
+ config = create_chia_directory(root_path, fingerprint, farming_address, plot_directory, auto_farm, docker_mode)
+ # Pre-generate plots by running block_tools init functions.
+ print("Please Wait, Generating plots...")
+ print("This may take up to a minute if you are on a slow machine")
+
+ await generate_plots(config, root_path, fingerprint, bitfield)
+ # final messages
+ final_farming_address = config["simulator"]["farming_address"]
+ print(f"\nFarming & Prefarm reward address: {final_farming_address}\n")
+ print("Configuration Wizard Complete.")
+ print("Starting Simulator now...\n\n")
+
+ sys.argv[0] = str(Path(sys.executable).parent / "chia") # fix path for tests
+ await async_start(root_path, config, ("simulator",), False)
+
+ # now we make sure the simulator has a genesis block
+ print("Please wait, generating genesis block.")
+ while True:
+ try:
+ num_blocks: int = await get_current_height(root_path)
+ except ClientConnectorError:
+ await asyncio.sleep(0.25)
+ else:
+ if num_blocks == 0:
+ await farm_blocks(None, root_path, 1, True, final_farming_address)
+ print("Genesis block generated, exiting.")
+ else:
+ print("Genesis block already exists, exiting.")
+ break
+ print(f"\nMake sure your CHIA_ROOT Environment Variable is set to: {root_path}")
+
+
+def print_coin_record(
+ name: str,
+ address_prefix: str,
+ coin_record: CoinRecord,
+) -> None:
+ from datetime import datetime
+
+ coin_address = encode_puzzle_hash(coin_record.coin.puzzle_hash, address_prefix)
+ print(f"Coin 0x{coin_record.name.hex()}")
+ print(f"Wallet Address: {coin_address}")
+ print(f"Confirmed at block: {coin_record.confirmed_block_index}")
+ print(f"Spent: {f'at Block {coin_record.spent_block_index}' if coin_record.spent else 'No'}")
+ print(f"Coin Amount: {coin_record.coin.amount} {name}")
+ print(f"Parent Coin ID: 0x{coin_record.coin.parent_coin_info.hex()}")
+ print(f"Created at: {datetime.fromtimestamp(float(coin_record.timestamp)).strftime('%Y-%m-%d %H:%M:%S')}\n")
+
+
+async def print_coin_records(
+ config: Dict[str, Any],
+ node_client: SimulatorFullNodeRpcClient,
+ include_reward_coins: bool,
+ include_spent: bool = False,
+) -> None:
+ import sys
+
+ coin_records: List[CoinRecord] = await node_client.get_all_coins(include_spent)
+ coin_records = [coin_record for coin_record in coin_records if not coin_record.coinbase or include_reward_coins]
+ address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
+ name = "mojo"
+ paginate = False # I might change this later.
+ if len(coin_records) != 0:
+ print("All Coins: ")
+ if paginate is True:
+ paginate = sys.stdout.isatty()
+ num_per_screen = 5 if paginate else len(coin_records)
+ # ripped from cmds/wallet_funcs.
+ for i in range(0, len(coin_records), num_per_screen):
+ for j in range(0, num_per_screen):
+ if i + j >= len(coin_records):
+ break
+ print_coin_record(
+ coin_record=coin_records[i + j],
+ name=name,
+ address_prefix=address_prefix,
+ )
+ if i + num_per_screen <= len(coin_records) and paginate:
+ print("Press q to quit, or c to continue")
+ while True:
+ entered_key = sys.stdin.read(1)
+ if entered_key == "q":
+ return None
+ elif entered_key == "c":
+ break
+
+
+async def print_wallets(config: Dict[str, Any], node_client: SimulatorFullNodeRpcClient) -> None:
+ ph_and_amount = await node_client.get_all_puzzle_hashes()
+ address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
+ name = "mojo"
+ for puzzle_hash, (amount, num_tx) in ph_and_amount.items():
+ address = encode_puzzle_hash(puzzle_hash, address_prefix)
+ print(f"Address: {address} has a balance of: {amount} {name}, with a total of: {num_tx} transactions.\n")
+
+
+async def print_status(
+ rpc_port: Optional[int],
+ root_path: Path,
+ fingerprint: Optional[int],
+ show_key: bool,
+ show_coins: bool,
+ include_reward_coins: bool,
+ show_addresses: bool,
+) -> None:
+ """
+ This command allows users to easily get the status of the simulator
+ and information about the state of and the coins in the simulated blockchain.
+ """
+ from chia.cmds.show_funcs import print_blockchain_state
+ from chia.cmds.units import units
+
+ async with get_any_service_client(SimulatorFullNodeRpcClient, rpc_port, root_path) as (node_client, config):
+ if node_client is not None:
+ # Display keychain info
+ if show_key:
+ if fingerprint is None:
+ fingerprint = config["simulator"]["key_fingerprint"]
+ if fingerprint is not None:
+ display_key_info(
+ fingerprint, config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
+ )
+ else:
+ print(
+ "No fingerprint in config, either rerun 'cdv sim create' "
+ "or use --fingerprint to specify one, skipping key information."
+ )
+ # chain status ( basically chia show -s)
+ await print_blockchain_state(node_client, config)
+ print("")
+ # farming information
+ target_ph: bytes32 = await node_client.get_farming_ph()
+ farming_coin_records = await node_client.get_coin_records_by_puzzle_hash(target_ph, False)
+ prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
+ print(
+ f"Current Farming address: {encode_puzzle_hash(target_ph, prefix)}, "
+ f"with a balance of: "
+ f"{sum(coin_records.coin.amount for coin_records in farming_coin_records) / units['chia']} TXCH."
+ )
+ if show_addresses:
+ print("All Addresses: ")
+ await print_wallets(config, node_client)
+ if show_coins:
+ await print_coin_records(config, node_client, include_reward_coins)
+
+
+async def revert_block_height(
+ rpc_port: Optional[int],
+ root_path: Path,
+ num_blocks: int,
+ num_new_blocks: int,
+ reset_chain_to_genesis: bool,
+ use_revert_blocks: bool,
+) -> None:
+ """
+ This function allows users to easily revert the chain to a previous state or perform a reorg.
+ """
+ async with get_any_service_client(SimulatorFullNodeRpcClient, rpc_port, root_path) as (node_client, _):
+ if node_client is not None:
+ if use_revert_blocks:
+ if num_new_blocks != 1:
+ print(f"Ignoring num_new_blocks: {num_new_blocks}, because we are not performing a reorg.")
+ # in this case num_blocks is the number of blocks to delete
+ new_height: int = await node_client.revert_blocks(num_blocks, reset_chain_to_genesis)
+ print(
+ f"All transactions in Block: {new_height + num_blocks} and above were successfully deleted, "
+ "you should now delete & restart all wallets."
+ )
+ else:
+ # However, in this case num_blocks is the fork height.
+ new_height = await node_client.reorg_blocks(num_blocks, num_new_blocks, use_revert_blocks)
+ old_height = new_height - num_new_blocks
+ print(f"All transactions in Block: {old_height - num_blocks} and above were successfully reverted.")
+ print(f"Block Height is now: {new_height}")
+
+
+async def farm_blocks(
+ rpc_port: Optional[int],
+ root_path: Path,
+ num_blocks: int,
+ transaction_blocks: bool,
+ target_address: str,
+) -> None:
+ """
+ This function is used to generate new blocks.
+ """
+ async with get_any_service_client(SimulatorFullNodeRpcClient, rpc_port, root_path) as (node_client, config):
+ if node_client is not None:
+ if target_address == "":
+ target_address = config["simulator"]["farming_address"]
+ if target_address is None:
+ print(
+ "No target address in config, falling back to the temporary address currently in use. "
+ "You can use 'cdv sim create' or use --target-address to specify a different address."
+ )
+ target_ph: bytes32 = await node_client.get_farming_ph()
+ else:
+ target_ph = decode_puzzle_hash(target_address)
+ await node_client.farm_block(target_ph, num_blocks, transaction_blocks)
+ print(f"Farmed {num_blocks}{' Transaction' if transaction_blocks else ''} blocks")
+ block_height = (await node_client.get_blockchain_state())["peak"].height
+ print(f"Block Height is now: {block_height}")
+
+
+async def set_auto_farm(rpc_port: Optional[int], root_path: Path, set_autofarm: bool) -> None:
+ """
+ This function can be used to enable or disable Auto Farming.
+ """
+ async with get_any_service_client(SimulatorFullNodeRpcClient, rpc_port, root_path) as (node_client, _):
+ if node_client is not None:
+ current = await node_client.get_auto_farming()
+ if current == set_autofarm:
+ print(f"Auto farming is already {'on' if set_autofarm else 'off'}")
+ return
+ result = await node_client.set_auto_farming(set_autofarm)
+ print(f"Auto farming is now {'on' if result else 'off'}")
diff --git a/chia/cmds/start.py b/chia/cmds/start.py
--- a/chia/cmds/start.py
+++ b/chia/cmds/start.py
@@ -6,11 +6,11 @@
from chia.util.service_groups import all_groups
-@click.command("start", short_help="Start service groups")
+@click.command("start", help="Start service groups")
@click.option("-r", "--restart", is_flag=True, type=bool, help="Restart running services")
@click.argument("group", type=click.Choice(list(all_groups())), nargs=-1, required=True)
@click.pass_context
-def start_cmd(ctx: click.Context, restart: bool, group: str) -> None:
+def start_cmd(ctx: click.Context, restart: bool, group: tuple[str, ...]) -> None:
import asyncio
from chia.cmds.beta_funcs import warn_if_beta_enabled
diff --git a/chia/cmds/start_funcs.py b/chia/cmds/start_funcs.py
--- a/chia/cmds/start_funcs.py
+++ b/chia/cmds/start_funcs.py
@@ -47,7 +47,7 @@ async def create_start_daemon_connection(root_path: Path, config: Dict[str, Any]
passphrase = None
if await connection.is_keyring_locked():
passphrase = Keychain.get_cached_master_passphrase()
- if not Keychain.master_passphrase_is_valid(passphrase):
+ if passphrase is None or not Keychain.master_passphrase_is_valid(passphrase):
with ThreadPoolExecutor(max_workers=1, thread_name_prefix="get_current_passphrase") as executor:
passphrase = await asyncio.get_running_loop().run_in_executor(executor, get_current_passphrase)
@@ -59,7 +59,7 @@ async def create_start_daemon_connection(root_path: Path, config: Dict[str, Any]
return None
-async def async_start(root_path: Path, config: Dict[str, Any], group: str, restart: bool) -> None:
+async def async_start(root_path: Path, config: Dict[str, Any], group: tuple[str, ...], restart: bool) -> None:
try:
daemon = await create_start_daemon_connection(root_path, config)
except KeychainMaxUnlockAttempts:
diff --git a/chia/cmds/stop.py b/chia/cmds/stop.py
--- a/chia/cmds/stop.py
+++ b/chia/cmds/stop.py
@@ -11,7 +11,7 @@
from chia.util.service_groups import all_groups, services_for_groups
-async def async_stop(root_path: Path, config: Dict[str, Any], group: str, stop_daemon: bool) -> int:
+async def async_stop(root_path: Path, config: Dict[str, Any], group: tuple[str, ...], stop_daemon: bool) -> int:
from chia.daemon.client import connect_to_daemon_and_validate
daemon = await connect_to_daemon_and_validate(root_path, config)
@@ -47,11 +47,11 @@ async def async_stop(root_path: Path, config: Dict[str, Any], group: str, stop_d
return return_val
-@click.command("stop", short_help="Stop services")
+@click.command("stop", help="Stop services")
@click.option("-d", "--daemon", is_flag=True, type=bool, help="Stop daemon")
@click.argument("group", type=click.Choice(list(all_groups())), nargs=-1, required=True)
@click.pass_context
-def stop_cmd(ctx: click.Context, daemon: bool, group: str) -> None:
+def stop_cmd(ctx: click.Context, daemon: bool, group: tuple[str, ...]) -> None:
from chia.cmds.beta_funcs import warn_if_beta_enabled
root_path = ctx.obj["root_path"]
diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py
--- a/chia/cmds/wallet.py
+++ b/chia/cmds/wallet.py
@@ -14,13 +14,13 @@
from chia.wallet.util.wallet_types import WalletType
-@click.group("wallet", short_help="Manage your wallet")
+@click.group("wallet", help="Manage your wallet")
@click.pass_context
def wallet_cmd(ctx: click.Context) -> None:
pass
-@wallet_cmd.command("get_transaction", short_help="Get a transaction")
+@wallet_cmd.command("get_transaction", help="Get a transaction")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -28,7 +28,7 @@ def wallet_cmd(ctx: click.Context) -> None:
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option("-tx", "--tx_id", help="transaction id to search for", type=str, required=True)
@click.option("--verbose", "-v", count=True, type=int)
@@ -41,7 +41,7 @@ def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_transaction))
-@wallet_cmd.command("get_transactions", short_help="Get all transactions")
+@wallet_cmd.command("get_transactions", help="Get all transactions")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -49,7 +49,7 @@ def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option(
"-o",
@@ -132,7 +132,7 @@ def get_transactions_cmd(
sys.stdout.close()
-@wallet_cmd.command("send", short_help="Send chia to another wallet")
+@wallet_cmd.command("send", help="Send chia to another wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -140,7 +140,7 @@ def get_transactions_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
@click.option("-a", "--amount", help="How much chia to send, in XCH", type=str, required=True)
@click.option("-e", "--memo", help="Additional memo for the transaction", type=str, default=None)
@@ -180,7 +180,6 @@ def get_transactions_cmd(
help="Exclude this coin from being spent.",
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the change.",
is_flag=True,
@@ -219,7 +218,7 @@ def send_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, send))
-@wallet_cmd.command("show", short_help="Show wallet information")
+@wallet_cmd.command("show", help="Show wallet information")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -227,7 +226,7 @@ def send_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-w",
"--wallet_type",
@@ -246,7 +245,7 @@ def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, wallet_type: Opti
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, args, print_balances))
-@wallet_cmd.command("get_address", short_help="Get a wallet receive address")
+@wallet_cmd.command("get_address", help="Get a wallet receive address")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -255,7 +254,7 @@ def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, wallet_type: Opti
default=None,
)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-n/-l",
"--new-address/--latest-address",
@@ -275,9 +274,7 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id, fingerprint: int, new_ad
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_address))
-@wallet_cmd.command(
- "delete_unconfirmed_transactions", short_help="Deletes all unconfirmed transactions for this wallet ID"
-)
+@wallet_cmd.command("delete_unconfirmed_transactions", help="Deletes all unconfirmed transactions for this wallet ID")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -286,7 +283,7 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id, fingerprint: int, new_ad
default=None,
)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
def delete_unconfirmed_transactions_cmd(wallet_rpc_port: Optional[int], id, fingerprint: int) -> None:
extra_params = {"id": id}
import asyncio
@@ -296,7 +293,7 @@ def delete_unconfirmed_transactions_cmd(wallet_rpc_port: Optional[int], id, fing
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, delete_unconfirmed_transactions))
-@wallet_cmd.command("get_derivation_index", short_help="Get the last puzzle hash derivation path index")
+@wallet_cmd.command("get_derivation_index", help="Get the last puzzle hash derivation path index")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -304,7 +301,7 @@ def delete_unconfirmed_transactions_cmd(wallet_rpc_port: Optional[int], id, fing
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) -> None:
extra_params: Dict[str, Any] = {}
import asyncio
@@ -314,7 +311,7 @@ def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) -
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_derivation_index))
-@wallet_cmd.command("sign_message", short_help="Sign a message by a derivation address")
+@wallet_cmd.command("sign_message", help="Sign a message by a derivation address")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -322,7 +319,7 @@ def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) -
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-a", "--address", help="The address you want to use for signing", type=str, required=True)
@click.option("-m", "--hex_message", help="The hex message you want sign", type=str, required=True)
def address_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, address: str, hex_message: str) -> None:
@@ -335,7 +332,7 @@ def address_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, addre
@wallet_cmd.command(
- "update_derivation_index", short_help="Generate additional derived puzzle hashes starting at the provided index"
+ "update_derivation_index", help="Generate additional derived puzzle hashes starting at the provided index"
)
@click.option(
"-wp",
@@ -344,7 +341,7 @@ def address_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, addre
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-i", "--index", help="Index to set. Must be greater than the current derivation index", type=int, required=True
)
@@ -357,7 +354,7 @@ def update_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, update_derivation_index))
-@wallet_cmd.command("add_token", short_help="Add/Rename a CAT to the wallet by its asset ID")
+@wallet_cmd.command("add_token", help="Add/Rename a CAT to the wallet by its asset ID")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -392,7 +389,7 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, add_token))
-@wallet_cmd.command("make_offer", short_help="Create an offer of XCH/CATs/NFTs for XCH/CATs/NFTs")
+@wallet_cmd.command("make_offer", help="Create an offer of XCH/CATs/NFTs for XCH/CATs/NFTs")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -400,7 +397,7 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option(
"-o",
"--offer",
@@ -420,7 +417,6 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str
"-m", "--fee", help="A fee to add to the offer when it gets taken, in XCH", default="0", show_default=True
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the offer.",
is_flag=True,
@@ -450,7 +446,7 @@ def make_offer_cmd(
@wallet_cmd.command(
- "get_offers", short_help="Get the status of existing offers. Displays only active/pending offers by default."
+ "get_offers", help="Get the status of existing offers. Displays only active/pending offers by default."
)
@click.option(
"-wp",
@@ -459,7 +455,7 @@ def make_offer_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-id", "--id", help="The ID of the offer that you wish to examine")
@click.option("-p", "--filepath", help="The path to rewrite the offer file to (must be used in conjunction with --id)")
@click.option("-em", "--exclude-my-offers", help="Exclude your own offers from the output", is_flag=True)
@@ -496,7 +492,7 @@ def get_offers_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_offers))
-@wallet_cmd.command("take_offer", short_help="Examine or take an offer")
+@wallet_cmd.command("take_offer", help="Examine or take an offer")
@click.argument("path_or_hex", type=str, nargs=1, required=True)
@click.option(
"-wp",
@@ -505,13 +501,12 @@ def get_offers_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-e", "--examine-only", help="Print the summary of the offer file but do not take it", is_flag=True)
@click.option(
"-m", "--fee", help="The fee to use when pushing the completed offer, in XCH", default="0", show_default=True
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the offer.",
is_flag=True,
@@ -538,7 +533,7 @@ def take_offer_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, take_offer))
-@wallet_cmd.command("cancel_offer", short_help="Cancel an existing offer")
+@wallet_cmd.command("cancel_offer", help="Cancel an existing offer")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -546,7 +541,7 @@ def take_offer_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-id", "--id", help="The offer ID that you wish to cancel", required=True)
@click.option("--insecure", help="Don't make an on-chain transaction, simply mark the offer as cancelled", is_flag=True)
@click.option(
@@ -576,12 +571,12 @@ def check_wallet_cmd(ctx: click.Context, db_path: str, verbose: bool) -> None:
asyncio.run(scan(ctx.obj["root_path"], db_path, verbose=verbose))
-@wallet_cmd.group("did", short_help="DID related actions")
+@wallet_cmd.group("did", help="DID related actions")
def did_cmd():
pass
-@did_cmd.command("create", short_help="Create DID wallet")
+@did_cmd.command("create", help="Create DID wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -589,7 +584,7 @@ def did_cmd():
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-n", "--name", help="Set the DID wallet name", type=str)
@click.option(
"-a",
@@ -619,7 +614,7 @@ def did_create_wallet_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, create_did_wallet))
-@did_cmd.command("sign_message", short_help="Sign a message by a DID")
+@did_cmd.command("sign_message", help="Sign a message by a DID")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -627,7 +622,7 @@ def did_create_wallet_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--did_id", help="DID ID you want to use for signing", type=str, required=True)
@click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True)
def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: str, hex_message: str) -> None:
@@ -639,7 +634,7 @@ def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: s
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, sign_message))
-@did_cmd.command("set_name", short_help="Set DID wallet name")
+@did_cmd.command("set_name", help="Set DID wallet name")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -647,7 +642,7 @@ def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: s
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, required=True)
@click.option("-n", "--name", help="Set the DID wallet name", type=str, required=True)
def did_wallet_name_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, name: str) -> None:
@@ -659,7 +654,7 @@ def did_wallet_name_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, did_set_wallet_name))
-@did_cmd.command("get_did", short_help="Get DID from wallet")
+@did_cmd.command("get_did", help="Get DID from wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -667,7 +662,7 @@ def did_wallet_name_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the wallet to use", type=int, required=True)
def did_get_did_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None:
import asyncio
@@ -678,12 +673,218 @@ def did_get_did_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_did))
-@wallet_cmd.group("nft", short_help="NFT related actions")
+@did_cmd.command("get_details", help="Get more details of any DID")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-id", "--coin_id", help="Id of the DID or any coin ID of the DID", type=str, required=True)
+@click.option("-l", "--latest", help="Return latest DID information", is_flag=True, default=True)
+def did_get_details_cmd(wallet_rpc_port: Optional[int], fingerprint: int, coin_id: str, latest: bool) -> None:
+ import asyncio
+
+ from .wallet_funcs import get_did_info
+
+ extra_params = {"coin_id": coin_id, "latest": latest}
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_did_info))
+
+
+@did_cmd.command("update_metadata", help="Update the metadata of a DID")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--id", help="Id of the DID wallet to use", type=int, required=True)
+@click.option("-d", "--metadata", help="The new whole metadata in json format", type=str, required=True)
+@click.option(
+ "--reuse",
+ help="Reuse existing address for the change.",
+ is_flag=True,
+ default=False,
+)
+def did_update_metadata_cmd(
+ wallet_rpc_port: Optional[int], fingerprint: int, id: int, metadata: str, reuse: bool
+) -> None:
+ import asyncio
+
+ from .wallet_funcs import update_did_metadata
+
+ extra_params = {"did_wallet_id": id, "metadata": metadata, "reuse_puzhash": reuse}
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, update_did_metadata))
+
+
+@did_cmd.command("find_lost", help="Find the did you should own and recovery the DID wallet")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-id", "--coin_id", help="Id of the DID or any coin ID of the DID", type=str, required=True)
+@click.option("-m", "--metadata", help="The new whole metadata in json format", type=str, required=False)
+@click.option(
+ "-r",
+ "--recovery_list_hash",
+ help="Override the recovery list hash of the DID. Only set this if your last DID spend updated the recovery list",
+ type=str,
+ required=False,
+)
+@click.option(
+ "-n",
+ "--num_verification",
+ help="Override the required verification number of the DID."
+ " Only set this if your last DID spend updated the required verification number",
+ type=int,
+ required=False,
+)
+def did_find_lost_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ coin_id: str,
+ metadata: Optional[str],
+ recovery_list_hash: Optional[str],
+ num_verification: Optional[int],
+) -> None:
+ import asyncio
+
+ from .wallet_funcs import find_lost_did
+
+ extra_params = {
+ "coin_id": coin_id,
+ "metadata": metadata,
+ "recovery_list_hash": recovery_list_hash,
+ "num_verification": num_verification,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, find_lost_did))
+
+
+@did_cmd.command("message_spend", help="Generate a DID spend bundle for announcements")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--id", help="Id of the DID wallet to use", type=int, required=True)
+@click.option(
+ "-pa",
+ "--puzzle_announcements",
+ help="The list of puzzle announcement hex strings, split by comma (,)",
+ type=str,
+ required=False,
+)
+@click.option(
+ "-ca",
+ "--coin_announcements",
+ help="The list of coin announcement hex strings, split by comma (,)",
+ type=str,
+ required=False,
+)
+def did_message_spend_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ id: int,
+ puzzle_announcements: Optional[str],
+ coin_announcements: Optional[str],
+) -> None:
+ import asyncio
+
+ from .wallet_funcs import did_message_spend
+
+ puzzle_list: List[str] = []
+ coin_list: List[str] = []
+ if puzzle_announcements is not None:
+ try:
+ puzzle_list = puzzle_announcements.split(",")
+            # validate that puzzle announcements is a list of hex strings
+ for announcement in puzzle_list:
+ bytes.fromhex(announcement)
+ except ValueError:
+ print("Invalid puzzle announcement format, should be a list of hex strings.")
+ return
+ if coin_announcements is not None:
+ try:
+ coin_list = coin_announcements.split(",")
+ # validate that coin announcements is a list of hex strings
+ for announcement in coin_list:
+ bytes.fromhex(announcement)
+ except ValueError:
+ print("Invalid coin announcement format, should be a list of hex strings.")
+ return
+ extra_params = {"did_wallet_id": id, "puzzle_announcements": puzzle_list, "coin_announcements": coin_list}
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, did_message_spend))
+
+
+@did_cmd.command("transfer", help="Transfer a DID")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--id", help="Id of the DID wallet to use", type=int, required=True)
+@click.option("-ta", "--target-address", help="Target recipient wallet address", type=str, required=True)
+@click.option(
+ "-rr", "--reset_recovery", help="If you want to reset the recovery DID settings.", is_flag=True, default=False
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@click.option(
+ "--reuse",
+ help="Reuse existing address for the change.",
+ is_flag=True,
+ default=False,
+)
+def did_transfer_did(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ id: int,
+ target_address: str,
+ reset_recovery: bool,
+ fee: str,
+ reuse: bool,
+) -> None:
+ import asyncio
+
+ from .wallet_funcs import transfer_did
+
+ extra_params = {
+ "did_wallet_id": id,
+ "with_recovery": reset_recovery is False,
+ "target_address": target_address,
+ "fee": fee,
+ "reuse_puzhash": True if reuse else None,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, transfer_did))
+
+
+@wallet_cmd.group("nft", help="NFT related actions")
def nft_cmd():
pass
-@nft_cmd.command("create", short_help="Create an NFT wallet")
+@nft_cmd.command("create", help="Create an NFT wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -691,7 +892,7 @@ def nft_cmd():
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-di", "--did-id", help="DID Id to use", type=str)
@click.option("-n", "--name", help="Set the NFT wallet name", type=str)
def nft_wallet_create_cmd(
@@ -705,7 +906,7 @@ def nft_wallet_create_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, create_nft_wallet))
-@nft_cmd.command("sign_message", short_help="Sign a message by a NFT")
+@nft_cmd.command("sign_message", help="Sign a message by a NFT")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -713,7 +914,7 @@ def nft_wallet_create_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--nft_id", help="NFT ID you want to use for signing", type=str, required=True)
@click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True)
def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: str, hex_message: str) -> None:
@@ -725,7 +926,7 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: s
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, sign_message))
-@nft_cmd.command("mint", short_help="Mint an NFT")
+@nft_cmd.command("mint", help="Mint an NFT")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -733,7 +934,7 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: s
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
@click.option("-ra", "--royalty-address", help="Royalty address", type=str)
@click.option("-ta", "--target-address", help="Target address", type=str)
@@ -764,7 +965,6 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: s
show_default=True,
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the change.",
is_flag=True,
@@ -823,7 +1023,7 @@ def nft_mint_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, mint_nft))
-@nft_cmd.command("add_uri", short_help="Add an URI to an NFT")
+@nft_cmd.command("add_uri", help="Add an URI to an NFT")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -831,7 +1031,7 @@ def nft_mint_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
@click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to add the URI to", type=str, required=True)
@click.option("-u", "--uri", help="URI to add to the NFT", type=str)
@@ -847,7 +1047,6 @@ def nft_mint_cmd(
callback=validate_fee,
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the change.",
is_flag=True,
@@ -880,7 +1079,7 @@ def nft_add_uri_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, add_uri_to_nft))
-@nft_cmd.command("transfer", short_help="Transfer an NFT")
+@nft_cmd.command("transfer", help="Transfer an NFT")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -888,7 +1087,7 @@ def nft_add_uri_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
@click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to transfer", type=str, required=True)
@click.option("-ta", "--target-address", help="Target recipient wallet address", type=str, required=True)
@@ -902,7 +1101,6 @@ def nft_add_uri_cmd(
callback=validate_fee,
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the change.",
is_flag=True,
@@ -931,7 +1129,7 @@ def nft_transfer_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, transfer_nft))
-@nft_cmd.command("list", short_help="List the current NFTs")
+@nft_cmd.command("list", help="List the current NFTs")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -939,7 +1137,7 @@ def nft_transfer_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None:
import asyncio
@@ -950,7 +1148,7 @@ def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> N
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, list_nfts))
-@nft_cmd.command("set_did", short_help="Set a DID on an NFT")
+@nft_cmd.command("set_did", help="Set a DID on an NFT")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -958,7 +1156,7 @@ def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> N
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True)
@click.option("-di", "--did-id", help="DID Id to set on the NFT", type=str, required=True)
@click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to set the DID on", type=str, required=True)
@@ -972,7 +1170,6 @@ def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> N
callback=validate_fee,
)
@click.option(
- "-r",
"--reuse",
help="Reuse existing address for the change.",
is_flag=True,
@@ -1001,7 +1198,7 @@ def nft_set_did_cmd(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, set_nft_did))
-@nft_cmd.command("get_info", short_help="Get NFT information")
+@nft_cmd.command("get_info", help="Get NFT information")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -1009,7 +1206,7 @@ def nft_set_did_cmd(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to get information on", type=str, required=True)
def nft_get_info_cmd(
wallet_rpc_port: Optional[int],
@@ -1030,12 +1227,12 @@ def nft_get_info_cmd(
wallet_cmd.add_command(coins_cmd)
-@wallet_cmd.group("notifications", short_help="Send/Manage notifications")
+@wallet_cmd.group("notifications", help="Send/Manage notifications")
def notification_cmd():
pass
-@notification_cmd.command("send", short_help="Send a notification to the owner of an address")
+@notification_cmd.command("send", help="Send a notification to the owner of an address")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -1043,7 +1240,7 @@ def notification_cmd():
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-t", "--to-address", help="The address to send the notification to", type=str, required=True)
@click.option(
"-a",
@@ -1079,7 +1276,7 @@ def _send_notification(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, send_notification))
-@notification_cmd.command("get", short_help="Get notification(s) that are in your wallet")
+@notification_cmd.command("get", help="Get notification(s) that are in your wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -1087,7 +1284,7 @@ def _send_notification(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="The specific notification ID to show", type=str, default=[], multiple=True)
@click.option("-s", "--start", help="The number of notifications to skip", type=int, default=None)
@click.option("-e", "--end", help="The number of notifications to stop at", type=int, default=None)
@@ -1112,7 +1309,7 @@ def _get_notifications(
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_notifications))
-@notification_cmd.command("delete", short_help="Delete notification(s) that are in your wallet")
+@notification_cmd.command("delete", help="Delete notification(s) that are in your wallet")
@click.option(
"-wp",
"--wallet-rpc-port",
@@ -1120,7 +1317,7 @@ def _get_notifications(
type=int,
default=None,
)
-@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which wallet to use", type=int)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
@click.option("-i", "--id", help="A specific notification ID to delete", type=str, multiple=True)
@click.option("--all", help="All notifications can be deleted (they will be recovered during resync)", is_flag=True)
def _delete_notifications(
@@ -1140,3 +1337,220 @@ def _delete_notifications(
"all": all,
}
asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, delete_notifications))
+
+
+@wallet_cmd.group("vcs", short_help="Verifiable Credential related actions")
+def vcs_cmd(): # pragma: no cover
+ pass
+
+
+@vcs_cmd.command("mint", short_help="Mint a VC")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-d", "--did", help="The DID of the VC's proof provider", type=str, required=True)
+@click.option("-t", "--target-address", help="The address to send the VC to once it's minted", type=str, required=False)
+@click.option("-m", "--fee", help="Blockchain fee for mint transaction", type=str, required=False)
+def _mint_vc(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ did: str,
+ target_address: Optional[str],
+ fee: Optional[str],
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import mint_vc
+
+ extra_params = {
+ "did": did,
+ "target_address": target_address,
+ "fee": fee,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, mint_vc))
+
+
+@vcs_cmd.command("get", short_help="Get a list of existing VCs")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option(
+ "-s", "--start", help="The index to start the list at", type=int, required=False, default=0, show_default=True
+)
+@click.option(
+ "-c", "--count", help="How many results to return", type=int, required=False, default=50, show_default=True
+)
+def _get_vcs(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ start: int,
+ count: int,
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import get_vcs
+
+ extra_params = {
+ "start": start,
+ "count": count,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_vcs))
+
+
+@vcs_cmd.command("update_proofs", short_help="Update a VC's proofs if you have the provider DID")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-l", "--vc-id", help="The launcher ID of the VC whose proofs should be updated", type=str, required=True)
+@click.option(
+ "-t",
+ "--new-puzhash",
+ help="The address to send the VC after the proofs have been updated",
+ type=str,
+ required=False,
+)
+@click.option("-p", "--new-proof-hash", help="The new proof hash to update the VC to", type=str, required=True)
+@click.option("-m", "--fee", help="Blockchain fee for update transaction", type=str, required=False)
+@click.option(
+ "--reuse-puzhash/--generate-new-puzhash",
+ help="Send the VC back to the same puzzle hash it came from (ignored if --new-puzhash is specified)",
+ default=False,
+ show_default=True,
+)
+def _spend_vc(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ vc_id: str,
+ new_puzhash: Optional[str],
+ new_proof_hash: str,
+ fee: str,
+ reuse_puzhash: bool,
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import spend_vc
+
+ extra_params = {
+ "vc_id": vc_id,
+ "new_puzhash": new_puzhash,
+ "new_proof_hash": new_proof_hash,
+ "fee": fee,
+ "reuse_puzhash": reuse_puzhash,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, spend_vc))
+
+
+@vcs_cmd.command("add_proof_reveal", short_help="Add a series of proofs that will combine to a single proof hash")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-p", "--proof", help="A flag to add as a proof", type=str, multiple=True)
+@click.option("-r", "--root-only", help="Do not add the proofs to the DB, just output the root", is_flag=True)
+def _add_proof_reveal(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ proof: List[str],
+ root_only: bool,
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import add_proof_reveal
+
+ extra_params = {
+ "proofs": proof,
+ "root_only": root_only,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, add_proof_reveal))
+
+
+@vcs_cmd.command("get_proofs_for_root", short_help="Get the stored proof flags for a given proof hash")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-r", "--proof-hash", help="The root to search for", type=str, required=True)
+def _get_proofs_for_root(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ proof_hash: str,
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import get_proofs_for_root
+
+ extra_params = {
+ "proof_hash": proof_hash,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, get_proofs_for_root))
+
+
+@vcs_cmd.command("revoke", short_help="Revoke any VC if you have the proper DID and the VCs parent coin")
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-p", "--parent-coin-id", help="The ID of the parent coin of the VC", type=str, required=True)
+@click.option("-m", "--fee", help="Blockchain fee for revocation transaction", type=str, required=False)
+@click.option(
+ "--reuse-puzhash/--generate-new-puzhash",
+ help="Send the VC back to the same puzzle hash it came from (ignored if --new-puzhash is specified)",
+ default=False,
+ show_default=True,
+)
+def _revoke_vc(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ parent_coin_id: str,
+ fee: str,
+ reuse_puzhash: bool,
+) -> None: # pragma: no cover
+ import asyncio
+
+ from chia.cmds.cmds_util import execute_with_wallet
+
+ from .wallet_funcs import revoke_vc
+
+ extra_params = {
+ "parent_coin_id": parent_coin_id,
+ "fee": fee,
+ "reuse_puzhash": reuse_puzhash,
+ }
+ asyncio.run(execute_with_wallet(wallet_rpc_port, fingerprint, extra_params, revoke_vc))
diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py
--- a/chia/cmds/wallet_funcs.py
+++ b/chia/cmds/wallet_funcs.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import asyncio
+import json
import os
import pathlib
import sys
@@ -29,6 +30,7 @@
from chia.wallet.util.address_type import AddressType, ensure_valid_address
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.vc_wallet.vc_store import VCProofs
CATNameResolver = Callable[[bytes32], Awaitable[Optional[Tuple[Optional[uint32], str]]]]
@@ -63,7 +65,12 @@ def print_transaction(tx: TransactionRecord, verbose: bool, name, address_prefix
def get_mojo_per_unit(wallet_type: WalletType) -> int:
mojo_per_unit: int
- if wallet_type in {WalletType.STANDARD_WALLET, WalletType.POOLING_WALLET, WalletType.DATA_LAYER}:
+ if wallet_type in {
+ WalletType.STANDARD_WALLET,
+ WalletType.POOLING_WALLET,
+ WalletType.DATA_LAYER,
+ WalletType.VC,
+ }: # pragma: no cover
mojo_per_unit = units["chia"]
elif wallet_type == WalletType.CAT:
mojo_per_unit = units["cat"]
@@ -90,7 +97,12 @@ async def get_unit_name_for_wallet_id(
wallet_id: int,
wallet_client: WalletRpcClient,
):
- if wallet_type in {WalletType.STANDARD_WALLET, WalletType.POOLING_WALLET, WalletType.DATA_LAYER}:
+ if wallet_type in {
+ WalletType.STANDARD_WALLET,
+ WalletType.POOLING_WALLET,
+ WalletType.DATA_LAYER,
+ WalletType.VC,
+ }: # pragma: no cover
name = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"].upper()
elif wallet_type == WalletType.CAT:
name = await wallet_client.get_cat_name(wallet_id=wallet_id)
@@ -462,29 +474,33 @@ async def make_offer(args: dict, wallet_client: WalletRpcClient, fingerprint: in
if confirmation not in ["y", "yes"]:
print("Not creating offer...")
else:
- offer, trade_record = await wallet_client.create_offer_for_ids(
- offer_dict, driver_dict=driver_dict, fee=fee, reuse_puzhash=reuse_puzhash
- )
- if offer is not None:
- with open(pathlib.Path(filepath), "w") as file:
+ with open(pathlib.Path(filepath), "w") as file:
+ offer, trade_record = await wallet_client.create_offer_for_ids(
+ offer_dict, driver_dict=driver_dict, fee=fee, reuse_puzhash=reuse_puzhash
+ )
+ if offer is not None:
file.write(offer.to_bech32())
- print(f"Created offer with ID {trade_record.trade_id}")
- print(f"Use chia wallet get_offers --id {trade_record.trade_id} -f {fingerprint} to view status")
- else:
- print("Error creating offer")
+ print(f"Created offer with ID {trade_record.trade_id}")
+ print(
+ f"Use chia wallet get_offers --id {trade_record.trade_id} -f {fingerprint} to view status"
+ )
+ else:
+ print("Error creating offer")
def timestamp_to_time(timestamp):
return datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S")
-async def print_offer_summary(cat_name_resolver: CATNameResolver, sum_dict: Dict[str, int], has_fee: bool = False):
+async def print_offer_summary(
+    cat_name_resolver: CATNameResolver, sum_dict: Dict[str, int], has_fee: bool = False, network_xch: str = "XCH"
+):
for asset_id, amount in sum_dict.items():
description: str = ""
unit: int = units["chia"]
wid: str = "1" if asset_id == "xch" else ""
mojo_amount: int = int(Decimal(amount))
- name: str = "XCH"
+ name: str = network_xch
if asset_id != "xch":
name = asset_id
if asset_id == "unknown":
@@ -627,11 +643,12 @@ async def take_offer(args: dict, wallet_client: WalletRpcClient, fingerprint: in
offered, requested, _ = offer.summary()
cat_name_resolver = wallet_client.cat_asset_id_to_name
+ network_xch = AddressType.XCH.hrp(config).upper()
print("Summary:")
print(" OFFERED:")
- await print_offer_summary(cat_name_resolver, offered)
+ await print_offer_summary(cat_name_resolver, offered, network_xch=network_xch)
print(" REQUESTED:")
- await print_offer_summary(cat_name_resolver, requested)
+ await print_offer_summary(cat_name_resolver, requested, network_xch=network_xch)
print()
@@ -651,7 +668,7 @@ async def take_offer(args: dict, wallet_client: WalletRpcClient, fingerprint: in
if fungible_asset_id_str in requested:
nft_royalty_currency: str = "Unknown CAT"
if fungible_asset_id is None:
- nft_royalty_currency = "XCH"
+ nft_royalty_currency = network_xch
else:
result = await wallet_client.cat_asset_id_to_name(fungible_asset_id)
if result is not None:
@@ -667,7 +684,7 @@ async def take_offer(args: dict, wallet_client: WalletRpcClient, fingerprint: in
for nft_id, summaries in royalty_summary.items():
print(f" - For {nft_id}:")
for summary in summaries:
- divisor = units["chia"] if summary["asset"] == "XCH" else units["cat"]
+ divisor = units["chia"] if summary["asset"] == network_xch else units["cat"]
converted_amount = Decimal(summary["amount"]) / divisor
total_amounts_requested.setdefault(summary["asset"], fungible_asset_dict[summary["asset"]])
total_amounts_requested[summary["asset"]] += summary["amount"]
@@ -678,11 +695,11 @@ async def take_offer(args: dict, wallet_client: WalletRpcClient, fingerprint: in
print()
print("Total Amounts Requested:")
for asset, amount in total_amounts_requested.items():
- divisor = units["chia"] if asset == "XCH" else units["cat"]
+ divisor = units["chia"] if asset == network_xch else units["cat"]
converted_amount = Decimal(amount) / divisor
print(f" - {converted_amount} {asset} ({amount} mojos)")
- print(f"Included Fees: {Decimal(offer.fees()) / units['chia']} XCH, {offer.fees()} mojos")
+ print(f"Included Fees: {Decimal(offer.fees()) / units['chia']} {network_xch}, {offer.fees()} mojos")
if not examine_only:
print()
@@ -828,6 +845,80 @@ async def get_did(args: Dict, wallet_client: WalletRpcClient, fingerprint: int)
print(f"Failed to get DID: {e}")
+async def get_did_info(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
+ coin_id: str = args["coin_id"]
+ latest: bool = args["latest"]
+ did_padding_length = 23
+ try:
+ response = await wallet_client.get_did_info(coin_id, latest)
+ print(f"{'DID:'.ljust(did_padding_length)} {response['did_id']}")
+ print(f"{'Coin ID:'.ljust(did_padding_length)} {response['latest_coin']}")
+ print(f"{'Inner P2 Address:'.ljust(did_padding_length)} {response['p2_address']}")
+ print(f"{'Public Key:'.ljust(did_padding_length)} {response['public_key']}")
+ print(f"{'Launcher ID:'.ljust(did_padding_length)} {response['launcher_id']}")
+ print(f"{'DID Metadata:'.ljust(did_padding_length)} {response['metadata']}")
+ print(f"{'Recovery List Hash:'.ljust(did_padding_length)} {response['recovery_list_hash']}")
+ print(f"{'Recovery Required Verifications:'.ljust(did_padding_length)} {response['num_verification']}")
+ print(f"{'Last Spend Puzzle:'.ljust(did_padding_length)} {response['full_puzzle']}")
+ print(f"{'Last Spend Solution:'.ljust(did_padding_length)} {response['solution']}")
+ print(f"{'Last Spend Hints:'.ljust(did_padding_length)} {response['hints']}")
+
+ except Exception as e:
+ print(f"Failed to get DID details: {e}")
+
+
+async def update_did_metadata(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
+ try:
+ response = await wallet_client.update_did_metadata(
+ args["did_wallet_id"], json.loads(args["metadata"]), args["reuse_puzhash"]
+ )
+ print(f"Successfully updated DID wallet ID: {response['wallet_id']}, Spend Bundle: {response['spend_bundle']}")
+ except Exception as e:
+ print(f"Failed to update DID metadata: {e}")
+
+
+async def did_message_spend(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
+ try:
+ response = await wallet_client.did_message_spend(
+ args["did_wallet_id"], args["puzzle_announcements"], args["coin_announcements"]
+ )
+ print(f"Message Spend Bundle: {response['spend_bundle']}")
+ except Exception as e:
+ print(f"Failed to update DID metadata: {e}")
+
+
+async def transfer_did(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
+ try:
+ response = await wallet_client.did_transfer_did(
+ args["did_wallet_id"],
+ args["target_address"],
+ args["fee"],
+ args["with_recovery"],
+ args["reuse_puzhash"],
+ )
+ print(f"Successfully transferred DID to {args['target_address']}")
+ print(f"Transaction ID: {response['transaction_id']}")
+ print(f"Transaction: {response['transaction']}")
+ except Exception as e:
+ print(f"Failed to transfer DID: {e}")
+
+
+async def find_lost_did(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
+ try:
+ response = await wallet_client.find_lost_did(
+ args["coin_id"],
+ args["recovery_list_hash"],
+ args["metadata"],
+ args["num_verification"],
+ )
+ if response["success"]:
+ print(f"Successfully found lost DID {args['coin_id']}, latest coin ID: {response['latest_coin_id']}")
+ else:
+ print(f"Cannot find lost DID {args['coin_id']}: {response['error']}")
+ except Exception as e:
+ print(f"Failed to find lost DID: {e}")
+
+
async def create_nft_wallet(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None:
did_id = args["did_id"]
name = args["name"]
@@ -1132,3 +1223,115 @@ async def sign_message(args: Dict, wallet_client: WalletRpcClient, fingerprint:
print(f"Public Key: {pubkey}")
print(f"Signature: {signature}")
print(f"Signing Mode: {signing_mode}")
+
+
+async def mint_vc(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
+ vc_record, txs = await wallet_client.vc_mint(
+ decode_puzzle_hash(ensure_valid_address(args["did"], allowed_types={AddressType.DID}, config=config)),
+ None
+ if args["target_address"] is None
+ else decode_puzzle_hash(
+ ensure_valid_address(args["target_address"], allowed_types={AddressType.XCH}, config=config)
+ ),
+ uint64(0) if args["fee"] is None else uint64(int(Decimal(args["fee"]) * units["chia"])),
+ )
+
+ print(f"New VC with launcher ID minted: {vc_record.vc.launcher_id}")
+ print("Relevant TX records:")
+ print("")
+ for tx in txs:
+ print_transaction(
+ tx,
+ verbose=False,
+ name="XCH",
+ address_prefix=selected_network_address_prefix(config),
+ mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET),
+ )
+
+
+async def get_vcs(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
+ vc_records, proofs = await wallet_client.vc_get_list(args["start"], args["count"])
+ print("Proofs:")
+ for hash, proof_dict in proofs.items():
+ print(f"- {hash}")
+ for proof in proof_dict:
+ print(f" - {proof}")
+ for record in vc_records:
+ print("")
+ print(f"Launcher ID: {record.vc.launcher_id.hex()}")
+ print(f"Coin ID: {record.vc.coin.name().hex()}")
+ print(
+ f"Inner Address: {encode_puzzle_hash(record.vc.inner_puzzle_hash, selected_network_address_prefix(config))}"
+ )
+ if record.vc.proof_hash is None:
+ pass
+ else:
+ print(f"Proof Hash: {record.vc.proof_hash.hex()}")
+
+
+async def spend_vc(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
+ txs = await wallet_client.vc_spend(
+ bytes32.from_hexstr(args["vc_id"]),
+ new_puzhash=None if args["new_puzhash"] is None else bytes32.from_hexstr(args["new_puzhash"]),
+ new_proof_hash=bytes32.from_hexstr(args["new_proof_hash"]),
+ fee=uint64(0) if args["fee"] is None else uint64(int(Decimal(args["fee"]) * units["chia"])),
+ reuse_puzhash=args["reuse_puzhash"],
+ )
+
+ print("Proofs successfully updated!")
+ print("Relevant TX records:")
+ print("")
+ for tx in txs:
+ print_transaction(
+ tx,
+ verbose=False,
+ name="XCH",
+ address_prefix=selected_network_address_prefix(config),
+ mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET),
+ )
+
+
+async def add_proof_reveal(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ if len(args["proofs"]) == 0:
+ print("Must specify at least one proof")
+ return
+
+ proof_dict: Dict[str, str] = {proof: "1" for proof in args["proofs"]}
+ if args["root_only"]:
+ print(f"Proof Hash: {VCProofs(proof_dict).root()}")
+ return
+ else:
+ await wallet_client.vc_add_proofs(proof_dict)
+ print("Proofs added to DB successfully!")
+ return
+
+
+async def get_proofs_for_root(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ proof_dict: Dict[str, str] = await wallet_client.vc_get_proofs_for_root(bytes32.from_hexstr(args["proof_hash"]))
+ print("Proofs:")
+ for proof in proof_dict:
+ print(f" - {proof}")
+
+
+async def revoke_vc(args: Dict, wallet_client: WalletRpcClient, fingerprint: int) -> None: # pragma: no cover
+ config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
+ txs = await wallet_client.vc_revoke(
+ bytes32.from_hexstr(args["parent_coin_id"]),
+ fee=uint64(0) if args["fee"] is None else uint64(int(Decimal(args["fee"]) * units["chia"])),
+ reuse_puzhash=args["reuse_puzhash"],
+ )
+
+ print("VC successfully revoked!")
+ print("Relevant TX records:")
+ print("")
+ for tx in txs:
+ print_transaction(
+ tx,
+ verbose=False,
+ name="XCH",
+ address_prefix=selected_network_address_prefix(config),
+ mojo_per_unit=get_mojo_per_unit(wallet_type=WalletType.STANDARD_WALLET),
+ )
diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py
--- a/chia/consensus/block_body_validation.py
+++ b/chia/consensus/block_body_validation.py
@@ -480,9 +480,7 @@ async def validate_block_body(
pairs_msgs: List[bytes] = []
if npc_result:
assert npc_result.conds is not None
- pairs_pks, pairs_msgs = pkm_pairs(
- npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA, soft_fork=height >= constants.SOFT_FORK_HEIGHT
- )
+ pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA)
# 22. Verify aggregated signature
# TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster
diff --git a/chia/consensus/block_header_validation.py b/chia/consensus/block_header_validation.py
--- a/chia/consensus/block_header_validation.py
+++ b/chia/consensus/block_header_validation.py
@@ -815,7 +815,11 @@ def validate_unfinished_header_block(
return None, ValidationError(Err.INVALID_TRANSACTIONS_FILTER_HASH)
# 26a. The timestamp in Foliage Block must not be over 5 minutes in the future
- if header_block.foliage_transaction_block.timestamp > int(time.time() + constants.MAX_FUTURE_TIME):
+ if height >= constants.SOFT_FORK2_HEIGHT:
+ max_future_time = constants.MAX_FUTURE_TIME2
+ else:
+ max_future_time = constants.MAX_FUTURE_TIME
+ if header_block.foliage_transaction_block.timestamp > int(time.time() + max_future_time):
return None, ValidationError(Err.TIMESTAMP_TOO_FAR_IN_FUTURE)
if prev_b is not None:
diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py
--- a/chia/consensus/blockchain.py
+++ b/chia/consensus/blockchain.py
@@ -304,6 +304,8 @@ async def add_block(
else:
return AddBlockResult.ADDED_AS_ORPHAN, None, None
+ # only to be called under short fork points
+ # under deep reorgs this can cause OOM
async def _reconsider_peak(
self,
block_record: BlockRecord,
diff --git a/chia/consensus/constants.py b/chia/consensus/constants.py
--- a/chia/consensus/constants.py
+++ b/chia/consensus/constants.py
@@ -36,6 +36,7 @@ class ConsensusConstants:
SUB_SLOT_TIME_TARGET: int # The target number of seconds per sub-slot
NUM_SP_INTERVALS_EXTRA: int # The difference between signage point and infusion point (plus required_iters)
MAX_FUTURE_TIME: int # The next block can have a timestamp of at most these many seconds more
+ MAX_FUTURE_TIME2: int # After soft-fork2, this is the new MAX_FUTURE_TIME
NUMBER_OF_TIMESTAMPS: int # Than the average of the last NUMBER_OF_TIMESTAMPS blocks
# Used as the initial cc rc challenges, as well as first block back pointers, and first SES back pointer
# We override this value based on the chain being run (testnet0, testnet1, mainnet, etc)
@@ -61,12 +62,22 @@ class ConsensusConstants:
MAX_GENERATOR_SIZE: uint32
MAX_GENERATOR_REF_LIST_SIZE: uint32
POOL_SUB_SLOT_ITERS: uint64
- # soft fork initiated in 1.7.0 release
- SOFT_FORK_HEIGHT: uint32
# soft fork initiated in 1.8.0 release
SOFT_FORK2_HEIGHT: uint32
+ # soft fork initiated in 2.0 release
+ SOFT_FORK3_HEIGHT: uint32
+
+ # the hard fork planned with the 2.0 release
+ # this is the block with the first plot filter adjustment
+ HARD_FORK_HEIGHT: uint32
+
+ # the plot filter adjustment heights
+ PLOT_FILTER_128_HEIGHT: uint32
+ PLOT_FILTER_64_HEIGHT: uint32
+ PLOT_FILTER_32_HEIGHT: uint32
+
def replace(self, **changes: object) -> "ConsensusConstants":
return dataclasses.replace(self, **changes)
diff --git a/chia/consensus/default_constants.py b/chia/consensus/default_constants.py
--- a/chia/consensus/default_constants.py
+++ b/chia/consensus/default_constants.py
@@ -26,6 +26,7 @@
"SUB_SLOT_TIME_TARGET": 600, # The target number of seconds per slot, mainnet 600
"NUM_SP_INTERVALS_EXTRA": 3, # The number of sp intervals to add to the signage point
"MAX_FUTURE_TIME": 5 * 60, # The next block can have a timestamp of at most these many seconds in the future
+ "MAX_FUTURE_TIME2": 2 * 60, # The next block can have a timestamp of at most these many seconds in the future
"NUMBER_OF_TIMESTAMPS": 11, # Than the average of the last NUMBER_OF_TIMESTAMPS blocks
# Used as the initial cc rc challenges, as well as first block back pointers, and first SES back pointer
# We override this value based on the chain being run (testnet0, testnet1, mainnet, etc)
@@ -55,8 +56,17 @@
"MAX_GENERATOR_SIZE": 1000000,
"MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list
"POOL_SUB_SLOT_ITERS": 37600000000, # iters limit * NUM_SPS
- "SOFT_FORK_HEIGHT": 3630000,
- "SOFT_FORK2_HEIGHT": 4000000,
+ "SOFT_FORK2_HEIGHT": 3886635,
+    # September 2023
+ "SOFT_FORK3_HEIGHT": 4200000,
+ # June 2024
+ "HARD_FORK_HEIGHT": 5496000,
+ # June 2027
+ "PLOT_FILTER_128_HEIGHT": 10542000,
+ # June 2030
+ "PLOT_FILTER_64_HEIGHT": 15592000,
+ # June 2033
+ "PLOT_FILTER_32_HEIGHT": 20643000,
}
diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py
--- a/chia/consensus/multiprocess_validation.py
+++ b/chia/consensus/multiprocess_validation.py
@@ -122,11 +122,7 @@ def batch_pre_validate_blocks(
if validate_signatures:
if npc_result is not None and block.transactions_info is not None:
assert npc_result.conds
- pairs_pks, pairs_msgs = pkm_pairs(
- npc_result.conds,
- constants.AGG_SIG_ME_ADDITIONAL_DATA,
- soft_fork=block.height >= constants.SOFT_FORK_HEIGHT,
- )
+ pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA)
# Using AugSchemeMPL.aggregate_verify, so it's safe to use from_bytes_unchecked
pks_objects: List[G1Element] = [G1Element.from_bytes_unchecked(pk) for pk in pairs_pks]
if not AugSchemeMPL.aggregate_verify(
@@ -198,7 +194,6 @@ async def pre_validate_blocks_multiprocessing(
prev_b: Optional[BlockRecord] = None
# Collects all the recent blocks (up to the previous sub-epoch)
recent_blocks: Dict[bytes32, BlockRecord] = {}
- recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
num_sub_slots_found = 0
num_blocks_seen = 0
if blocks[0].height > 0:
@@ -211,9 +206,6 @@ async def pre_validate_blocks_multiprocessing(
or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
or num_sub_slots_found < num_sub_slots_to_look_for
) and curr.height > 0:
- if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
- recent_blocks_compressed[curr.header_hash] = curr
-
if curr.first_in_sub_slot:
assert curr.finished_challenge_slot_hashes is not None
num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
@@ -222,7 +214,6 @@ async def pre_validate_blocks_multiprocessing(
num_blocks_seen += 1
curr = block_records.block_record(curr.prev_hash)
recent_blocks[curr.header_hash] = curr
- recent_blocks_compressed[curr.header_hash] = curr
block_record_was_present = []
for block in blocks:
block_record_was_present.append(block_records.contains_block(block.header_hash))
@@ -282,10 +273,8 @@ async def pre_validate_blocks_multiprocessing(
if not block_records.contains_block(block_rec.header_hash):
block_records.add_block_record(block_rec) # Temporarily add block to dict
recent_blocks[block_rec.header_hash] = block_rec
- recent_blocks_compressed[block_rec.header_hash] = block_rec
else:
recent_blocks[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
- recent_blocks_compressed[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
prev_b = block_rec
diff_ssis.append((difficulty, sub_slot_iters))
@@ -295,19 +284,15 @@ async def pre_validate_blocks_multiprocessing(
if not block_record_was_present[i]:
block_records.remove_block_record(block.header_hash)
- recent_sb_compressed_pickled = {bytes(k): bytes(v) for k, v in recent_blocks_compressed.items()}
npc_results_pickled = {}
for k, v in npc_results.items():
npc_results_pickled[k] = bytes(v)
futures = []
# Pool of workers to validate blocks concurrently
+ recent_blocks_bytes = {bytes(k): bytes(v) for k, v in recent_blocks.items()} # convert to bytes
for i in range(0, len(blocks), batch_size):
end_i = min(i + batch_size, len(blocks))
blocks_to_validate = blocks[i:end_i]
- if any([len(block.finished_sub_slots) > 0 for block in blocks_to_validate]):
- final_pickled = {bytes(k): bytes(v) for k, v in recent_blocks.items()}
- else:
- final_pickled = recent_sb_compressed_pickled
b_pickled: Optional[List[bytes]] = None
hb_pickled: Optional[List[bytes]] = None
previous_generators: List[Optional[bytes]] = []
@@ -349,7 +334,7 @@ async def pre_validate_blocks_multiprocessing(
pool,
batch_pre_validate_blocks,
constants,
- final_pickled,
+ recent_blocks_bytes,
b_pickled,
hb_pickled,
previous_generators,
diff --git a/chia/daemon/client.py b/chia/daemon/client.py
--- a/chia/daemon/client.py
+++ b/chia/daemon/client.py
@@ -215,6 +215,6 @@ async def acquire_connection_to_daemon(
yield daemon # <----
except Exception as e:
print(f"Exception occurred while communicating with the daemon: {e}")
-
- if daemon is not None:
- await daemon.close()
+ finally:
+ if daemon is not None:
+ await daemon.close()
diff --git a/chia/daemon/server.py b/chia/daemon/server.py
--- a/chia/daemon/server.py
+++ b/chia/daemon/server.py
@@ -25,8 +25,7 @@
from chia.daemon.windows_signal import kill
from chia.plotters.plotters import get_available_plotters
from chia.plotting.util import add_plot_directory
-from chia.server.server import ssl_context_for_root, ssl_context_for_server
-from chia.ssl.create_ssl import get_mozilla_ca_crt
+from chia.server.server import ssl_context_for_server
from chia.util.beta_metrics import BetaMetricsLogger
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config
@@ -42,7 +41,7 @@
io_pool_exc = ThreadPoolExecutor()
try:
- from aiohttp import ClientSession, WSMsgType, web
+ from aiohttp import WSMsgType, web
from aiohttp.web_ws import WebSocketResponse
except ModuleNotFoundError:
print("Error: Make sure to run . ./activate from the project folder before starting Chia.")
@@ -54,21 +53,6 @@
service_plotter = "chia_plotter"
-async def fetch(url: str):
- async with ClientSession() as session:
- try:
- mozilla_root = get_mozilla_ca_crt()
- ssl_context = ssl_context_for_root(mozilla_root, log=log)
- response = await session.get(url, ssl=ssl_context)
- if not response.ok:
- log.warning("Response not OK.")
- return None
- return await response.text()
- except Exception as e:
- log.error(f"Exception while fetching {url}, exception: {e}")
- return None
-
-
class PlotState(str, Enum):
SUBMITTED = "SUBMITTED"
RUNNING = "RUNNING"
diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py
--- a/chia/data_layer/data_layer.py
+++ b/chia/data_layer/data_layer.py
@@ -1,7 +1,9 @@
from __future__ import annotations
import asyncio
+import json
import logging
+import os
import random
import time
import traceback
@@ -18,6 +20,7 @@
Layer,
Offer,
OfferStore,
+ PluginStatus,
Proof,
ProofOfInclusion,
ProofOfInclusionLayer,
@@ -46,6 +49,18 @@
from chia.wallet.transaction_record import TransactionRecord
+async def get_plugin_info(url: str) -> Tuple[str, Dict[str, Any]]:
+ try:
+ async with aiohttp.ClientSession() as session:
+ async with session.post(url + "/plugin_info", json={}) as response:
+ ret = {"status": response.status}
+ if response.status == 200:
+ ret["response"] = json.loads(await response.text())
+ return url, ret
+ except aiohttp.ClientError as e:
+ return url, {"error": f"ClientError: {e}"}
+
+
class DataLayer:
data_store: DataStore
db_path: Path
@@ -58,6 +73,8 @@ class DataLayer:
none_bytes: bytes32
lock: asyncio.Lock
_server: Optional[ChiaServer]
+ downloaders: List[str]
+ uploaders: List[str]
@property
def server(self) -> ChiaServer:
@@ -73,6 +90,8 @@ def __init__(
config: Dict[str, Any],
root_path: Path,
wallet_rpc_init: Awaitable[WalletRpcClient],
+ downloaders: List[str],
+        uploaders: List[str],  # don't add FilesystemUploader to this, it is the default uploader
name: Optional[str] = None,
):
if name == "":
@@ -96,6 +115,8 @@ def __init__(
self.none_bytes = bytes32([0] * 32)
self.lock = asyncio.Lock()
self._server = None
+ self.downloaders = downloaders
+ self.uploaders = uploaders
def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None:
self.state_changed_callback = callback
@@ -352,6 +373,7 @@ async def fetch_and_validate(self, tree_id: bytes32) -> None:
random.shuffle(servers_info)
for server_info in servers_info:
url = server_info.url
+
root = await self.data_store.get_tree_root(tree_id=tree_id)
if root.generation > singleton_record.generation:
self.log.info(
@@ -375,7 +397,6 @@ async def fetch_and_validate(self, tree_id: bytes32) -> None:
min_generation=uint32(root.generation + 1),
max_generation=singleton_record.generation,
)
-
try:
timeout = self.config.get("client_timeout", 15)
proxy_url = self.config.get("proxy_url", None)
@@ -389,6 +410,7 @@ async def fetch_and_validate(self, tree_id: bytes32) -> None:
timeout,
self.log,
proxy_url,
+ await self.get_downloader(tree_id, url),
)
if success:
self.log.info(
@@ -404,7 +426,21 @@ async def fetch_and_validate(self, tree_id: bytes32) -> None:
except Exception as e:
self.log.warning(f"Exception while downloading files for {tree_id}: {e} {traceback.format_exc()}.")
+ async def get_downloader(self, tree_id: bytes32, url: str) -> Optional[str]:
+ request_json = {"store_id": tree_id.hex(), "url": url}
+ for d in self.downloaders:
+ async with aiohttp.ClientSession() as session:
+ try:
+ async with session.post(d + "/handle_download", json=request_json) as response:
+ res_json = await response.json()
+ if res_json["handle_download"]:
+ return d
+ except Exception as e:
+ self.log.error(f"get_downloader could not get response: {type(e).__name__}: {e}")
+ return None
+
async def upload_files(self, tree_id: bytes32) -> None:
+ uploaders = await self.get_uploaders(tree_id)
singleton_record: Optional[SingletonRecord] = await self.wallet_rpc.dl_latest_singleton(tree_id, True)
if singleton_record is None:
self.log.info(f"Upload files: no on-chain record for {tree_id}.")
@@ -417,12 +453,37 @@ async def upload_files(self, tree_id: bytes32) -> None:
# If we make some batch updates, which get confirmed to the chain, we need to create the files.
# We iterate back and write the missing files, until we find the files already written.
root = await self.data_store.get_tree_root(tree_id=tree_id, generation=publish_generation)
- while publish_generation > 0 and await write_files_for_root(
- self.data_store,
- tree_id,
- root,
- self.server_files_location,
- ):
+ while publish_generation > 0:
+ write_file_result = await write_files_for_root(self.data_store, tree_id, root, self.server_files_location)
+ if not write_file_result.result:
+ # this particular return only happens if the files already exist, no need to log anything
+ break
+ try:
+ if uploaders is not None and len(uploaders) > 0:
+ request_json = {
+ "store_id": tree_id.hex(),
+ "full_tree_filename": write_file_result.full_tree.name,
+ "diff_filename": write_file_result.diff_tree.name,
+ }
+ for uploader in uploaders:
+ self.log.info(f"Using uploader {uploader} for store {tree_id.hex()}")
+ async with aiohttp.ClientSession() as session:
+ async with session.post(uploader + "/upload", json=request_json) as response:
+ res_json = await response.json()
+ if res_json["uploaded"]:
+ self.log.info(
+ f"Uploaded files to {uploader} for store {tree_id.hex()} "
+                                        f"generation {publish_generation}"
+ )
+ else:
+ self.log.error(
+                                        f"Failed to upload files to {uploader}, will retry later: {res_json}"
+ )
+ except Exception as e:
+ self.log.error(f"Exception uploading files, will retry later: tree id {tree_id}")
+ self.log.debug(f"Failed to upload files, cleaning local files: {type(e).__name__}: {e}")
+ os.remove(write_file_result.full_tree)
+ os.remove(write_file_result.diff_tree)
publish_generation -= 1
root = await self.data_store.get_tree_root(tree_id=tree_id, generation=publish_generation)
@@ -434,9 +495,24 @@ async def add_missing_files(self, store_id: bytes32, overwrite: bool, foldername
return
max_generation = min(singleton_record.generation, 0 if root is None else root.generation)
server_files_location = foldername if foldername is not None else self.server_files_location
+ files = []
for generation in range(1, max_generation + 1):
root = await self.data_store.get_tree_root(tree_id=store_id, generation=generation)
- await write_files_for_root(self.data_store, store_id, root, server_files_location, overwrite)
+ res = await write_files_for_root(self.data_store, store_id, root, server_files_location, overwrite)
+ files.append(res.diff_tree.name)
+ files.append(res.full_tree.name)
+
+ uploaders = await self.get_uploaders(store_id)
+ if uploaders is not None and len(uploaders) > 0:
+ request_json = {"store_id": store_id.hex(), "files": json.dumps(files)}
+ for uploader in uploaders:
+ async with aiohttp.ClientSession() as session:
+ async with session.post(uploader + "/add_missing_files", json=request_json) as response:
+ res_json = await response.json()
+ if not res_json["uploaded"]:
+ self.log.error(f"failed to upload to uploader {uploader}")
+ else:
+ self.log.debug(f"uploaded to uploader {uploader}")
async def subscribe(self, store_id: bytes32, urls: List[str]) -> None:
parsed_urls = [url.rstrip("/") for url in urls]
@@ -802,3 +878,25 @@ async def get_sync_status(self, store_id: bytes32) -> SyncStatus:
target_root_hash=singleton_record.root,
target_generation=singleton_record.generation,
)
+
+ async def get_uploaders(self, tree_id: bytes32) -> List[str]:
+ uploaders = []
+ for uploader in self.uploaders:
+ async with aiohttp.ClientSession() as session:
+ try:
+ async with session.post(uploader + "/handle_upload", json={"store_id": tree_id.hex()}) as response:
+ res_json = await response.json()
+ if res_json["handle_upload"]:
+ uploaders.append(uploader)
+ except Exception as e:
+                    self.log.error(f"get_uploaders could not get response: {e}")
+ return uploaders
+
+ async def check_plugins(self) -> PluginStatus:
+ coros = [get_plugin_info(url=plugin) for plugin in {*self.uploaders, *self.downloaders}]
+ results = dict(await asyncio.gather(*coros))
+
+ uploader_status = {url: results.get(url, "unknown") for url in self.uploaders}
+ downloader_status = {url: results.get(url, "unknown") for url in self.downloaders}
+
+ return PluginStatus(uploaders=uploader_status, downloaders=downloader_status)
diff --git a/chia/data_layer/data_layer_util.py b/chia/data_layer/data_layer_util.py
--- a/chia/data_layer/data_layer_util.py
+++ b/chia/data_layer/data_layer_util.py
@@ -623,3 +623,17 @@ class SyncStatus:
generation: int
target_root_hash: bytes32
target_generation: int
+
+
+@dataclasses.dataclass(frozen=True)
+class PluginStatus:
+ uploaders: Dict[str, Dict[str, Any]]
+ downloaders: Dict[str, Dict[str, Any]]
+
+ def marshal(self) -> Dict[str, Any]:
+ return {
+ "plugin_status": {
+ "uploaders": self.uploaders,
+ "downloaders": self.downloaders,
+ }
+ }
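For illustration, marshal simply nests both maps under a single plugin_status key; the URL and payload below are made up:

    from chia.data_layer.data_layer_util import PluginStatus

    status = PluginStatus(
        uploaders={"http://localhost:8998": {"name": "Chia S3 Datalayer plugin", "version": "0.1.0"}},
        downloaders={},
    )
    print(status.marshal())
    # {'plugin_status': {'uploaders': {'http://localhost:8998': {...}}, 'downloaders': {}}}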
diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py
--- a/chia/data_layer/data_layer_wallet.py
+++ b/chia/data_layer/data_layer_wallet.py
@@ -4,10 +4,11 @@
import logging
import time
from operator import attrgetter
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Type, TypeVar
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast
from blspy import G1Element, G2Element
from clvm.EvalError import EvalError
+from typing_extensions import final
from chia.consensus.block_record import BlockRecord
from chia.data_layer.data_layer_errors import OfferIntegrityError
@@ -41,6 +42,7 @@
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.lineage_proof import LineageProof
from chia.wallet.outer_puzzles import AssetType
+from chia.wallet.payment import Payment
from chia.wallet.puzzle_drivers import PuzzleInfo, Solver
from chia.wallet.puzzles.singleton_top_layer_v1_1 import SINGLETON_LAUNCHER_HASH
from chia.wallet.sign_coin_spends import sign_coin_spends
@@ -49,7 +51,8 @@
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.merkle_utils import _simplify_merkle_proof
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend, fetch_coin_spend_for_coin_state
+from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
@@ -100,10 +103,13 @@ def from_json_dict(cls, json_dict: Dict[str, Any]) -> "Mirror":
)
-_T_DataLayerWallet = TypeVar("_T_DataLayerWallet", bound="DataLayerWallet")
+@final
+class DataLayerWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+ _protocol_check: ClassVar[WalletProtocol] = cast("DataLayerWallet", None)
-class DataLayerWallet:
wallet_state_manager: WalletStateManager
log: logging.Logger
wallet_info: WalletInfo
@@ -115,16 +121,14 @@ class DataLayerWallet:
@classmethod
async def create(
- cls: Type[_T_DataLayerWallet],
+ cls,
wallet_state_manager: WalletStateManager,
- wallet: Wallet,
wallet_info: WalletInfo,
- name: Optional[str] = None,
- ) -> _T_DataLayerWallet:
+ ) -> DataLayerWallet:
self = cls()
self.wallet_state_manager = wallet_state_manager
- self.log = logging.getLogger(name if name else __name__)
- self.standard_wallet = wallet
+ self.log = logging.getLogger(__name__)
+ self.standard_wallet = wallet_state_manager.main_wallet
self.wallet_info = wallet_info
self.wallet_id = uint8(self.wallet_info.id)
@@ -138,28 +142,22 @@ def id(self) -> uint32:
return self.wallet_info.id
@classmethod
- async def create_new_dl_wallet(
- cls: Type[_T_DataLayerWallet],
- wallet_state_manager: WalletStateManager,
- wallet: Wallet,
- name: Optional[str] = "DataLayer Wallet",
- ) -> _T_DataLayerWallet:
+ async def create_new_dl_wallet(cls, wallet_state_manager: WalletStateManager) -> DataLayerWallet:
"""
This must be called under the wallet state manager lock
"""
self = cls()
self.wallet_state_manager = wallet_state_manager
- self.log = logging.getLogger(name if name else __name__)
- self.standard_wallet = wallet
+ self.log = logging.getLogger(__name__)
+ self.standard_wallet = wallet_state_manager.main_wallet
for _, w in self.wallet_state_manager.wallets.items():
if w.type() == WalletType.DATA_LAYER:
raise ValueError("DataLayer Wallet already exists for this key")
- assert name is not None
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
- name,
+ "DataLayer Wallet",
WalletType.DATA_LAYER.value,
"",
)
@@ -222,29 +220,15 @@ async def track_new_launcher_id(
if await self.wallet_state_manager.dl_store.get_launcher(launcher_id) is not None:
self.log.info(f"Spend of launcher {launcher_id} has already been processed")
return None
- if spend is not None and spend.coin.name() == launcher_id: # spend.coin.name() == launcher_id is a sanity check
- await self.new_launcher_spend(spend, peer, height)
- else:
+ if spend is None or height is None:
launcher_state: CoinState = await self.get_launcher_coin_state(launcher_id, peer)
- launcher_spend: CoinSpend = await self.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- launcher_state.spent_height, launcher_state.coin, peer
- )
- await self.new_launcher_spend(launcher_spend, peer)
+ spend = await fetch_coin_spend_for_coin_state(launcher_state, peer)
+ assert launcher_state.spent_height is not None
+ height = uint32(launcher_state.spent_height)
- async def new_launcher_spend(
- self,
- launcher_spend: CoinSpend,
- peer: WSChiaConnection,
- height: Optional[uint32] = None,
- ) -> None:
- launcher_id: bytes32 = launcher_spend.coin.name()
- if height is None:
- coin_state = await self.get_launcher_coin_state(launcher_id, peer)
- height = None if coin_state.spent_height is None else uint32(coin_state.spent_height)
- assert height is not None
- full_puzhash, amount, root, inner_puzhash = launch_solution_to_singleton_info(
- launcher_spend.solution.to_program()
- )
+ assert spend.coin.name() == launcher_id, "coin_id should always match the launcher_id here"
+
+ full_puzhash, amount, root, inner_puzhash = launch_solution_to_singleton_info(spend.solution.to_program())
new_singleton = Coin(launcher_id, full_puzhash, amount)
singleton_record: Optional[SingletonRecord] = await self.wallet_state_manager.dl_store.get_latest_singleton(
@@ -279,7 +263,7 @@ async def new_launcher_spend(
)
)
- await self.wallet_state_manager.dl_store.add_launcher(launcher_spend.coin)
+ await self.wallet_state_manager.dl_store.add_launcher(spend.coin)
await self.wallet_state_manager.add_interested_puzzle_hashes([launcher_id], [self.id()])
await self.wallet_state_manager.add_interested_coin_ids([new_singleton.name()])
@@ -288,9 +272,7 @@ async def new_launcher_spend(
] = await self.wallet_state_manager.coin_store.get_coin_record(new_singleton.name())
while new_singleton_coin_record is not None and new_singleton_coin_record.spent_block_height > 0:
# We've already synced this before, so we need to sort of force a resync
- parent_spend: CoinSpend = await self.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- new_singleton_coin_record.spent_block_height, new_singleton, peer
- )
+ parent_spend = await fetch_coin_spend(new_singleton_coin_record.spent_block_height, new_singleton, peer)
await self.singleton_removed(parent_spend, new_singleton_coin_record.spent_block_height)
try:
additions = compute_additions(parent_spend)
@@ -543,16 +525,16 @@ async def create_update_state_spend(
)
# Create the solution
- primaries: List[AmountWithPuzzlehash] = [
- {
- "puzzlehash": announce_only.get_tree_hash() if announce_new_state else new_puz_hash,
- "amount": singleton_record.lineage_proof.amount if new_amount is None else new_amount,
- "memos": [
+ primaries = [
+ Payment(
+ announce_only.get_tree_hash() if announce_new_state else new_puz_hash,
+ singleton_record.lineage_proof.amount if new_amount is None else new_amount,
+ [
launcher_id,
root_hash,
announce_only.get_tree_hash() if announce_new_state else new_puz_hash,
],
- }
+ )
]
inner_sol: Program = self.standard_wallet.make_solution(
primaries=primaries,
@@ -772,9 +754,7 @@ async def delete_mirror(
new_puzhash: bytes32 = await self.get_new_puzzlehash()
excess_fee: int = fee - mirror_coin.amount
inner_sol: Program = self.standard_wallet.make_solution(
- primaries=[{"puzzlehash": new_puzhash, "amount": uint64(mirror_coin.amount - fee), "memos": []}]
- if excess_fee < 0
- else [],
+ primaries=[Payment(new_puzhash, uint64(mirror_coin.amount - fee))] if excess_fee < 0 else [],
coin_announcements={b"$"} if excess_fee > 0 else None,
)
mirror_spend = CoinSpend(
@@ -838,9 +818,7 @@ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection) -
parent_state: CoinState = (
await self.wallet_state_manager.wallet_node.get_coin_state([coin.parent_coin_info], peer=peer)
)[0]
- parent_spend: Optional[CoinSpend] = await self.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- height, parent_state.coin, peer
- )
+ parent_spend = await fetch_coin_spend(height, parent_state.coin, peer)
assert parent_spend is not None
launcher_id, urls = get_mirror_info(
parent_spend.puzzle_reveal.to_program(), parent_spend.solution.to_program()
@@ -1405,9 +1383,3 @@ def verify_offer(
if taker_from_offer != taker_from_reference:
raise OfferIntegrityError("taker: reference and offer inclusions do not match")
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = DataLayerWallet()
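The new _protocol_check ClassVar replaces the module-level _dummy assignment that used to instantiate the wallet just to type-check it: casting the class name to itself under TYPE_CHECKING lets mypy verify that DataLayerWallet still satisfies WalletProtocol with zero runtime cost. The same pattern in isolation, with a hypothetical Greeter protocol standing in for WalletProtocol:

    from typing import TYPE_CHECKING, ClassVar, Protocol, cast, final


    class Greeter(Protocol):
        def greet(self, name: str) -> str:
            ...


    @final
    class EnglishGreeter:
        if TYPE_CHECKING:
            # Only evaluated by the type checker; mypy errors here if the
            # class drifts out of conformance with the protocol.
            _protocol_check: ClassVar[Greeter] = cast("EnglishGreeter", None)

        def greet(self, name: str) -> str:
            return f"hello {name}"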
diff --git a/chia/data_layer/download_data.py b/chia/data_layer/download_data.py
--- a/chia/data_layer/download_data.py
+++ b/chia/data_layer/download_data.py
@@ -4,6 +4,7 @@
import logging
import os
import time
+from dataclasses import dataclass
from pathlib import Path
from typing import List, Optional
@@ -86,13 +87,20 @@ async def insert_into_data_store_from_file(
await data_store.insert_root_with_ancestor_table(tree_id=tree_id, node_hash=root_hash, status=Status.COMMITTED)
+@dataclass
+class WriteFilesResult:
+ result: bool
+ full_tree: Path
+ diff_tree: Path
+
+
async def write_files_for_root(
data_store: DataStore,
tree_id: bytes32,
root: Root,
foldername: Path,
overwrite: bool = False,
-) -> bool:
+) -> WriteFilesResult:
if root.node_hash is not None:
node_hash = root.node_hash
else:
@@ -124,7 +132,7 @@ async def write_files_for_root(
except FileExistsError:
pass
- return written
+ return WriteFilesResult(written, filename_full_tree, filename_diff_tree)
async def insert_from_delta_file(
@@ -137,37 +145,25 @@ async def insert_from_delta_file(
timeout: int,
log: logging.Logger,
proxy_url: str,
+ downloader: Optional[str],
) -> bool:
for root_hash in root_hashes:
timestamp = int(time.time())
existing_generation += 1
filename = get_delta_filename(tree_id, root_hash, existing_generation)
-
- try:
+ request_json = {"url": server_info.url, "client_folder": str(client_foldername), "filename": filename}
+ if downloader is None:
+ # use http downloader
+ if not await http_download(client_foldername, filename, proxy_url, server_info, timeout, log):
+ break
+ else:
+ log.info(f"Using downloader {downloader} for store {tree_id.hex()}.")
async with aiohttp.ClientSession() as session:
- headers = {"accept-encoding": "gzip"}
- async with session.get(
- server_info.url + "/" + filename, headers=headers, timeout=timeout, proxy=proxy_url
- ) as resp:
- resp.raise_for_status()
- size = int(resp.headers.get("content-length", 0))
- log.debug(f"Downloading delta file {filename}. Size {size} bytes.")
- progress_byte = 0
- progress_percentage = "{:.0%}".format(0)
- target_filename = client_foldername.joinpath(filename)
- with target_filename.open(mode="wb") as f:
- async for chunk, _ in resp.content.iter_chunks():
- f.write(chunk)
- progress_byte += len(chunk)
- new_percentage = "{:.0%}".format(progress_byte / size)
- if new_percentage != progress_percentage:
- progress_percentage = new_percentage
- log.info(f"Downloading delta file {filename}. {progress_percentage} of {size} bytes.")
- except Exception:
- target_filename = client_foldername.joinpath(filename)
- os.remove(target_filename)
- await data_store.server_misses_file(tree_id, server_info, timestamp)
- raise
+ async with session.post(downloader + "/download", json=request_json) as response:
+ res_json = await response.json()
+ if not res_json["downloaded"]:
+ log.error(f"Failed to download delta file {filename} from {downloader}: {res_json}")
+ break
log.info(f"Successfully downloaded delta file {filename}.")
try:
@@ -200,3 +196,34 @@ async def insert_from_delta_file(
raise
return True
+
+
+async def http_download(
+ client_folder: Path,
+ filename: str,
+ proxy_url: str,
+ server_info: ServerInfo,
+ timeout: int,
+ log: logging.Logger,
+) -> bool:
+ async with aiohttp.ClientSession() as session:
+ headers = {"accept-encoding": "gzip"}
+ async with session.get(
+ server_info.url + "/" + filename, headers=headers, timeout=timeout, proxy=proxy_url
+ ) as resp:
+ resp.raise_for_status()
+ size = int(resp.headers.get("content-length", 0))
+ log.debug(f"Downloading delta file {filename}. Size {size} bytes.")
+ progress_byte = 0
+ progress_percentage = "{:.0%}".format(0)
+ target_filename = client_folder.joinpath(filename)
+ with target_filename.open(mode="wb") as f:
+ async for chunk, _ in resp.content.iter_chunks():
+ f.write(chunk)
+ progress_byte += len(chunk)
+ new_percentage = "{:.0%}".format(progress_byte / size)
+ if new_percentage != progress_percentage:
+ progress_percentage = new_percentage
+ log.info(f"Downloading delta file {filename}. {progress_percentage} of {size} bytes.")
+
+ return True
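When a downloader plugin is configured, insert_from_delta_file above skips http_download entirely and POSTs {"url", "client_folder", "filename"} to the plugin's /download route, trusting only the "downloaded" flag in the reply. A stub of such an endpoint, showing the contract rather than a real transport (this handler only checks whether the file already exists):

    from pathlib import Path

    from aiohttp import web


    async def download(request: web.Request) -> web.Response:
        # Body sent by insert_from_delta_file:
        #   {"url": ..., "client_folder": ..., "filename": ...}
        data = await request.json()
        target = Path(data["client_folder"]) / data["filename"]

        # A real plugin would fetch data["url"] + "/" + data["filename"] into
        # `target` here; this stub just reports whether the file is present.
        return web.json_response({"downloaded": target.exists()})


    app = web.Application()
    app.add_routes([web.post("/download", download)])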
diff --git a/chia/data_layer/s3_plugin_service.py b/chia/data_layer/s3_plugin_service.py
new file mode 100644
--- /dev/null
+++ b/chia/data_layer/s3_plugin_service.py
@@ -0,0 +1,410 @@
+from __future__ import annotations
+
+import asyncio
+import concurrent.futures
+import functools
+import json
+import logging
+import os
+import shutil
+import sys
+import tempfile
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Set
+from urllib.parse import urlparse
+
+import boto3 as boto3
+import yaml
+from aiohttp import web
+from botocore.exceptions import ClientError
+
+from chia.data_layer.download_data import is_filename_valid
+from chia.types.blockchain_format.sized_bytes import bytes32
+
+log = logging.getLogger(__name__)
+plugin_name = "Chia S3 Datalayer plugin"
+plugin_version = "0.1.0"
+
+
+@dataclass(frozen=True)
+class StoreConfig:
+ id: bytes32
+ bucket: Optional[str]
+ urls: Set[str]
+
+ @classmethod
+ def unmarshal(cls, d: Dict[str, Any]) -> StoreConfig:
+ upload_bucket = d.get("upload_bucket", None)
+ if upload_bucket is not None and len(upload_bucket) == 0:
+ upload_bucket = None
+
+ return StoreConfig(bytes32.from_hexstr(d["store_id"]), upload_bucket, d.get("download_urls", set()))
+
+ def marshal(self) -> Dict[str, Any]:
+ return {"store_id": self.id.hex(), "upload_bucket": self.bucket, "download_urls": self.urls}
+
+
+class S3Plugin:
+ boto_resource: boto3.resource
+ port: int
+ region: str
+ aws_access_key_id: str
+ aws_secret_access_key: str
+ server_files_path: Path
+ stores: List[StoreConfig]
+ instance_name: str
+
+ def __init__(
+ self,
+ region: str,
+ aws_access_key_id: str,
+ aws_secret_access_key: str,
+ server_files_path: Path,
+ stores: List[StoreConfig],
+ instance_name: str,
+ ):
+ self.boto_resource = boto3.resource(
+ "s3",
+ region_name=region,
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ )
+ self.stores = stores
+ self.instance_name = instance_name
+ self.server_files_path = server_files_path
+
+ async def add_store_id(self, request: web.Request) -> web.Response:
+ """Add a store id to the config file. Returns False for store ids that are already in the config."""
+ self.update_instance_from_config()
+ try:
+ data = await request.json()
+ store_id = bytes32.from_hexstr(data["store_id"])
+ except Exception as e:
+ log.error(f"failed parsing request {request} {type(e).__name__} {e}")
+ return web.json_response({"success": False})
+
+ bucket = data.get("bucket", None)
+ urls = data.get("urls", [])
+ if not bucket and not urls:
+ return web.json_response({"success": False, "reason": "bucket or urls must be provided"})
+
+ for store in self.stores:
+ if store_id == store.id:
+ return web.json_response({"success": False, "reason": f"store {store_id.hex()} already exists"})
+
+ new_store = StoreConfig(store_id, bucket, urls)
+ self.stores.append(new_store)
+ self.update_config()
+
+ return web.json_response({"success": True, "id": store_id.hex()})
+
+ async def remove_store_id(self, request: web.Request) -> web.Response:
+ """Remove a store id from the config file. Returns True for store ids that are not in the config."""
+ self.update_instance_from_config()
+ try:
+ data = await request.json()
+ store_id = bytes32.from_hexstr(data["store_id"])
+ except Exception as e:
+ log.error(f"failed parsing request {request} {e}")
+ return web.json_response({"success": False})
+
+ dirty = False
+ for i, store in enumerate(self.stores):
+ if store.id == store_id:
+ del self.stores[i]
+ dirty = True
+ break
+
+ if dirty:
+ self.update_config()
+
+ return web.json_response({"success": True, "store_id": store_id.hex()})
+
+ async def handle_upload(self, request: web.Request) -> web.Response:
+ self.update_instance_from_config()
+ try:
+ data = await request.json()
+ except Exception as e:
+ log.error(f"failed parsing request {request} {type(e).__name__} {e}")
+ return web.json_response({"handle_upload": False})
+
+ store_id = bytes32.from_hexstr(data["store_id"])
+ for store in self.stores:
+ if store.id == store_id and store.bucket:
+ return web.json_response({"handle_upload": True, "bucket": store.bucket})
+
+ return web.json_response({"handle_upload": False})
+
+ async def upload(self, request: web.Request) -> web.Response:
+ try:
+ data = await request.json()
+ store_id = bytes32.from_hexstr(data["store_id"])
+ bucket_str = self.get_bucket(store_id)
+ my_bucket = self.boto_resource.Bucket(bucket_str)
+ full_tree_name: str = data["full_tree_filename"]
+ diff_name: str = data["diff_filename"]
+
+ # filenames must follow the DataLayer naming convention
+ if not is_filename_valid(full_tree_name) or not is_filename_valid(diff_name):
+ return web.json_response({"uploaded": False})
+
+ # Pull the store_id from the filename to make sure we only upload for configured stores
+ full_tree_id = bytes32.fromhex(full_tree_name[:64])
+ diff_tree_id = bytes32.fromhex(diff_name[:64])
+
+ if not (full_tree_id == diff_tree_id == store_id):
+ return web.json_response({"uploaded": False})
+
+ full_tree_path = self.server_files_path.joinpath(full_tree_name)
+ diff_path = self.server_files_path.joinpath(diff_name)
+
+ try:
+ with concurrent.futures.ThreadPoolExecutor() as pool:
+ await asyncio.get_running_loop().run_in_executor(
+ pool,
+ functools.partial(my_bucket.upload_file, full_tree_path, full_tree_path.name),
+ )
+ await asyncio.get_running_loop().run_in_executor(
+ pool, functools.partial(my_bucket.upload_file, diff_path, diff_path.name)
+ )
+ except ClientError as e:
+ log.error(f"failed uploading file to aws {type(e).__name__} {e}")
+ return web.json_response({"uploaded": False})
+ except Exception as e:
+ log.error(f"failed handling request {request} {type(e).__name__} {e}")
+ return web.json_response({"uploaded": False})
+ return web.json_response({"uploaded": True})
+
+ async def healthz(self, request: web.Request) -> web.Response:
+ return web.json_response({"success": True})
+
+ async def plugin_info(self, request: web.Request) -> web.Response:
+ return web.json_response(
+ {
+ "name": plugin_name,
+ "version": plugin_version,
+ "instance": self.instance_name,
+ }
+ )
+
+ async def handle_download(self, request: web.Request) -> web.Response:
+ self.update_instance_from_config()
+ try:
+ data = await request.json()
+ except Exception as e:
+ log.error(f"failed parsing request {request} {type(e).__name__} {e}")
+ return web.json_response({"handle_download": False})
+
+ store_id = bytes32.from_hexstr(data["store_id"])
+ parse_result = urlparse(data["url"])
+ for store in self.stores:
+ if store.id == store_id and parse_result.scheme == "s3" and data["url"] in store.urls:
+ return web.json_response({"handle_download": True, "urls": list(store.urls)})
+
+ return web.json_response({"handle_download": False})
+
+ async def download(self, request: web.Request) -> web.Response:
+ try:
+ data = await request.json()
+ url = data["url"]
+ filename = data["filename"]
+
+ # filename must follow the DataLayer naming convention
+ if not is_filename_valid(filename):
+ return web.json_response({"downloaded": False})
+
+ # Pull the store_id from the filename to make sure we only download for configured stores
+ filename_tree_id = bytes32.fromhex(filename[:64])
+ parse_result = urlparse(url)
+ should_download = False
+ for store in self.stores:
+ if store.id == filename_tree_id and parse_result.scheme == "s3" and url in store.urls:
+ should_download = True
+ break
+
+ if not should_download:
+ return web.json_response({"downloaded": False})
+
+ bucket_str = parse_result.netloc
+ my_bucket = self.boto_resource.Bucket(bucket_str)
+ target_filename = self.server_files_path.joinpath(filename)
+ # Create folder for parent directory
+ target_filename.parent.mkdir(parents=True, exist_ok=True)
+ log.info(f"downloading {url} to {target_filename}...")
+ with concurrent.futures.ThreadPoolExecutor() as pool:
+ await asyncio.get_running_loop().run_in_executor(
+ pool, functools.partial(my_bucket.download_file, filename, str(target_filename))
+ )
+ except Exception as e:
+ log.error(f"failed parsing request {request} {type(e).__name__} {e}")
+ return web.json_response({"downloaded": False})
+ return web.json_response({"downloaded": True})
+
+ async def add_missing_files(self, request: web.Request) -> web.Response:
+ try:
+ data = await request.json()
+ store_id = bytes32.from_hexstr(data["store_id"])
+ bucket_str = self.get_bucket(store_id)
+ files = json.loads(data["files"])
+ my_bucket = self.boto_resource.Bucket(bucket_str)
+ existing_file_list = []
+ for my_bucket_object in my_bucket.objects.all():
+ existing_file_list.append(my_bucket_object.key)
+ try:
+ for file_name in files:
+ # filenames must follow the DataLayer naming convention
+ if not is_filename_valid(file_name):
+ log.error(f"failed uploading file {file_name}, invalid file name")
+ continue
+
+ # Pull the store_id from the filename to make sure we only upload for configured stores
+ if not (bytes32.fromhex(file_name[:64]) == store_id):
+ log.error(f"failed uploading file {file_name}, store id mismatch")
+ continue
+
+ file_path = self.server_files_path.joinpath(file_name)
+ if not os.path.isfile(file_path):
+ log.error(f"failed uploading file to aws, file {file_path} does not exist")
+ continue
+
+ if file_name in existing_file_list:
+ log.debug(f"skip {file_name} already in bucket")
+ continue
+
+ with concurrent.futures.ThreadPoolExecutor() as pool:
+ await asyncio.get_running_loop().run_in_executor(
+ pool,
+ functools.partial(my_bucket.upload_file, file_path, file_name),
+ )
+ except ClientError as e:
+ log.error(f"failed uploading file to aws {e}")
+ return web.json_response({"uploaded": False})
+ except Exception as e:
+ log.error(f"failed handling request {request} {e}")
+ return web.json_response({"uploaded": False})
+ return web.json_response({"uploaded": True})
+
+ def get_bucket(self, store_id: bytes32) -> str:
+ for store in self.stores:
+ if store.id == store_id and store.bucket:
+ return store.bucket
+
+ raise Exception(f"bucket not found for store id {store_id.hex()}")
+
+ def update_instance_from_config(self) -> None:
+ config = load_config(self.instance_name)
+ self.stores = read_store_ids_from_config(config)
+
+ def update_config(self) -> None:
+ with open("s3_plugin_config.yml", "r") as file:
+ full_config = yaml.safe_load(file)
+
+ full_config[self.instance_name]["stores"] = [store.marshal() for store in self.stores]
+ self.save_config("s3_plugin_config.yml", full_config)
+
+ def save_config(self, filename: str, config_data: Any) -> None:
+ path: Path = Path(filename)
+ with tempfile.TemporaryDirectory(dir=path.parent) as tmp_dir:
+ tmp_path: Path = Path(tmp_dir) / Path(filename)
+ with open(tmp_path, "w") as f:
+ yaml.safe_dump(config_data, f)
+ try:
+ os.replace(str(tmp_path), path)
+ except PermissionError:
+ shutil.move(str(tmp_path), str(path))
+
+
+def read_store_ids_from_config(config: Dict[str, Any]) -> List[StoreConfig]:
+ stores = []
+ for store in config.get("stores", []):
+ try:
+ stores.append(StoreConfig.unmarshal(store))
+ except Exception as e:
+ if "store_id" in store:
+ bad_store_id = f"{store['store_id']!r}"
+ else:
+ bad_store_id = "<missing>"
+ log.info(f"Ignoring invalid store id: {bad_store_id}: {type(e).__name__} {e}")
+
+ return stores
+
+
+def make_app(config: Dict[str, Any], instance_name: str) -> web.Application:
+ try:
+ region = config["aws_credentials"]["region"]
+ aws_access_key_id = config["aws_credentials"]["access_key_id"]
+ aws_secret_access_key = config["aws_credentials"]["secret_access_key"]
+ server_files_location = config["server_files_location"]
+ server_files_path = Path(server_files_location).resolve()
+ except KeyError as e:
+ sys.exit(
+ "config file must have server_files_location and aws_credentials with region, access_key_id, "
+ f"and secret_access_key. Missing config key: {e.args[0]!r}"
+ )
+
+ log_level = config.get("log_level", "INFO")
+ log.setLevel(log_level)
+ fh = logging.FileHandler(config.get("log_filename", "s3_plugin.log"))
+ fh.setLevel(log_level)
+ # create formatter and add it to the handlers
+ file_log_formatter = logging.Formatter(
+ fmt="%(asctime)s.%(msecs)03d %(name)s %(levelname)s %(message)s", datefmt="%Y-%m-%dT%H:%M:%S"
+ )
+
+ fh.setFormatter(file_log_formatter)
+ # add the handlers to logger
+ log.addHandler(fh)
+
+ stores = read_store_ids_from_config(config)
+
+ s3_client = S3Plugin(
+ region=region,
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ server_files_path=server_files_path,
+ stores=stores,
+ instance_name=instance_name,
+ )
+ app = web.Application()
+ app.add_routes([web.post("/handle_upload", s3_client.handle_upload)])
+ app.add_routes([web.post("/upload", s3_client.upload)])
+ app.add_routes([web.post("/handle_download", s3_client.handle_download)])
+ app.add_routes([web.post("/download", s3_client.download)])
+ app.add_routes([web.post("/add_store_id", s3_client.add_store_id)])
+ app.add_routes([web.post("/remove_store_id", s3_client.remove_store_id)])
+ app.add_routes([web.post("/add_missing_files", s3_client.add_missing_files)])
+ app.add_routes([web.post("/plugin_info", s3_client.plugin_info)])
+ app.add_routes([web.post("/healthz", s3_client.healthz)])
+ log.info(f"Starting s3 plugin {instance_name} on port {config['port']}")
+ return app
+
+
+def load_config(instance: str) -> Any:
+ with open("s3_plugin_config.yml", "r") as f:
+ full_config = yaml.safe_load(f)
+ return full_config[instance]
+
+
+def run_server() -> None:
+ instance_name = sys.argv[1]
+ try:
+ config = load_config(instance_name)
+ except KeyError:
+ sys.exit(f"Config for instance {instance_name} not found.")
+
+ if not config:
+ sys.exit(f"Config for instance {instance_name} is empty.")
+
+ try:
+ port = config["port"]
+ except KeyError:
+ sys.exit("Missing port in config file.")
+
+ web.run_app(make_app(config, instance_name), port=port, host="localhost")
+ log.info(f"Stopped s3 plugin {instance_name}")
+
+
+if __name__ == "__main__":
+ run_server()
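Piecing together load_config, make_app, and StoreConfig.unmarshal, the s3_plugin_config.yml that run_server reads from the working directory is keyed by instance name, and each instance needs a port, aws_credentials (region, access_key_id, secret_access_key) and server_files_location, with optional log settings and stores. A sketch that writes such a file with placeholder values (the bucket, URLs, and store id are invented):

    import yaml

    example_config = {
        "my_instance": {  # passed on the command line: python s3_plugin_service.py my_instance
            "port": 8998,
            "log_level": "INFO",
            "log_filename": "s3_plugin.log",
            "server_files_location": "/path/to/server_files_location",
            "aws_credentials": {
                "region": "us-west-2",
                "access_key_id": "AKIA...",
                "secret_access_key": "...",
            },
            "stores": [
                {
                    "store_id": "00" * 32,  # 64 hex characters
                    "upload_bucket": "my-datalayer-bucket",
                    "download_urls": ["s3://my-datalayer-bucket"],
                }
            ],
        }
    }

    with open("s3_plugin_config.yml", "w") as f:
        yaml.safe_dump(example_config, f)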
diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py
--- a/chia/farmer/farmer.py
+++ b/chia/farmer/farmer.py
@@ -653,13 +653,13 @@ async def generate_login_link(self, launcher_id: bytes32) -> Optional[str]:
async def get_harvesters(self, counts_only: bool = False) -> Dict[str, Any]:
harvesters: List[Dict[str, Any]] = []
for connection in self.server.get_connections(NodeType.HARVESTER):
- self.log.debug(f"get_harvesters host: {connection.peer_host}, node_id: {connection.peer_node_id}")
+ self.log.debug(f"get_harvesters host: {connection.peer_info.host}, node_id: {connection.peer_node_id}")
receiver = self.plot_sync_receivers.get(connection.peer_node_id)
if receiver is not None:
harvesters.append(receiver.to_dict(counts_only))
else:
self.log.debug(
- f"get_harvesters invalid peer: {connection.peer_host}, node_id: {connection.peer_node_id}"
+ f"get_harvesters invalid peer: {connection.peer_info.host}, node_id: {connection.peer_node_id}"
)
return {"harvesters": harvesters}
diff --git a/chia/farmer/farmer_api.py b/chia/farmer/farmer_api.py
--- a/chia/farmer/farmer_api.py
+++ b/chia/farmer/farmer_api.py
@@ -34,8 +34,10 @@
from chia.types.blockchain_format.proof_of_space import (
generate_plot_public_key,
generate_taproot_sk,
+ get_plot_id,
verify_and_get_quality_string,
)
+from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.api_decorators import api_request
from chia.util.ints import uint32, uint64
@@ -92,7 +94,8 @@ async def new_proof_of_space(self, new_proof_of_space: harvester_protocol.NewPro
new_proof_of_space.sp_hash,
)
if computed_quality_string is None:
- self.farmer.log.error(f"Invalid proof of space {new_proof_of_space.proof}")
+ plotid: bytes32 = get_plot_id(new_proof_of_space.proof)
+ self.farmer.log.error(f"Invalid proof of space: {plotid.hex()} proof: {new_proof_of_space.proof}")
return None
self.farmer.number_of_responses[new_proof_of_space.sp_hash] += 1
diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py
--- a/chia/full_node/full_node.py
+++ b/chia/full_node/full_node.py
@@ -280,8 +280,8 @@ def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[st
con_dict: Dict[str, Any] = {
"type": con.connection_type,
"local_port": con.local_port,
- "peer_host": con.peer_host,
- "peer_port": con.peer_port,
+ "peer_host": con.peer_info.host,
+ "peer_port": con.peer_info.port,
"peer_server_port": con.peer_server_port,
"node_id": con.peer_node_id,
"creation_time": con.creation_time,
@@ -988,7 +988,8 @@ async def _sync(self) -> None:
self.log.info(f"Total of {len(peers_with_peak)} peers with peak {target_peak.height}")
weight_proof_peer: WSChiaConnection = random.choice(peers_with_peak)
self.log.info(
- f"Requesting weight proof from peer {weight_proof_peer.peer_host} up to height {target_peak.height}"
+ f"Requesting weight proof from peer {weight_proof_peer.peer_info.host} "
+ f"up to height {target_peak.height}"
)
cur_peak: Optional[BlockRecord] = self.blockchain.get_peak()
if cur_peak is not None and target_peak.weight <= cur_peak.weight:
@@ -1006,20 +1007,22 @@ async def _sync(self) -> None:
# Disconnect from this peer, because they have not behaved properly
if response is None or not isinstance(response, full_node_protocol.RespondProofOfWeight):
await weight_proof_peer.close(600)
- raise RuntimeError(f"Weight proof did not arrive in time from peer: {weight_proof_peer.peer_host}")
+ raise RuntimeError(f"Weight proof did not arrive in time from peer: {weight_proof_peer.peer_info.host}")
if response.wp.recent_chain_data[-1].reward_chain_block.height != target_peak.height:
await weight_proof_peer.close(600)
- raise RuntimeError(f"Weight proof had the wrong height: {weight_proof_peer.peer_host}")
+ raise RuntimeError(f"Weight proof had the wrong height: {weight_proof_peer.peer_info.host}")
if response.wp.recent_chain_data[-1].reward_chain_block.weight != target_peak.weight:
await weight_proof_peer.close(600)
- raise RuntimeError(f"Weight proof had the wrong weight: {weight_proof_peer.peer_host}")
+ raise RuntimeError(f"Weight proof had the wrong weight: {weight_proof_peer.peer_info.host}")
# dont sync to wp if local peak is heavier,
# dont ban peer, we asked for this peak
current_peak = self.blockchain.get_peak()
if current_peak is not None:
if response.wp.recent_chain_data[-1].reward_chain_block.weight <= current_peak.weight:
- raise RuntimeError(f"current peak is heavier than Weight proof peek: {weight_proof_peer.peer_host}")
+ raise RuntimeError(
+ f"current peak is heavier than weight proof peak: {weight_proof_peer.peer_info.host}"
+ )
try:
validated, fork_point, summaries = await self.weight_proof_handler.validate_weight_proof(response.wp)
@@ -1075,19 +1078,16 @@ async def fetch_block_batches(
fetched = False
for peer in random.sample(new_peers_with_peak, len(new_peers_with_peak)):
if peer.closed:
- peers_with_peak.remove(peer)
continue
response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=30)
if response is None:
await peer.close()
- peers_with_peak.remove(peer)
elif isinstance(response, RespondBlocks):
await batch_queue.put((peer, response.blocks))
fetched = True
break
if fetched is False:
self.log.error(f"failed fetching {start_height} to {end_height} from peers")
- await batch_queue.put(None)
return
if self.sync_store.peers_changed.is_set():
new_peers_with_peak = self.get_peers_with_peak(peak_hash)
@@ -1114,8 +1114,6 @@ async def validate_block_batches(
blocks, peer, None if advanced_peak else uint32(fork_point_height), summaries
)
if success is False:
- if peer in peers_with_peak:
- peers_with_peak.remove(peer)
await peer.close(600)
raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
self.log.info(f"Added blocks {start_height} to {end_height}")
@@ -1881,11 +1879,7 @@ async def add_unfinished_block(
# blockchain.run_generator throws on errors, so npc_result is
# guaranteed to represent a successful run
assert npc_result.conds is not None
- pairs_pks, pairs_msgs = pkm_pairs(
- npc_result.conds,
- self.constants.AGG_SIG_ME_ADDITIONAL_DATA,
- soft_fork=height >= self.constants.SOFT_FORK_HEIGHT,
- )
+ pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA)
if not cached_bls.aggregate_verify(
pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature, True
):
@@ -2209,8 +2203,9 @@ async def add_transaction(
return MempoolInclusionStatus.FAILED, Err.ALREADY_INCLUDING_TRANSACTION
self.mempool_manager.add_and_maybe_pop_seen(spend_name)
self.log.debug(f"Processing transaction: {spend_name}")
- # Ignore if syncing
- if self.sync_store.get_sync_mode():
+ # Ignore if syncing or if we have not yet received a block
+ # the mempool must have a peak to validate transactions
+ if self.sync_store.get_sync_mode() or self.mempool_manager.peak is None:
status = MempoolInclusionStatus.FAILED
error: Optional[Err] = Err.NO_TRANSACTIONS_WHILE_SYNCING
self.mempool_manager.remove_seen(spend_name)
@@ -2402,6 +2397,11 @@ async def _replace_proof(
raise
async def add_compact_proof_of_time(self, request: timelord_protocol.RespondCompactProofOfTime) -> None:
+ peak = self.blockchain.get_peak()
+ if peak is None or peak.height - request.height < 5:
+ self.log.info(f"Ignoring add_compact_proof_of_time, height {request.height} too recent.")
+ return None
+
field_vdf = CompressibleVDFField(int(request.field_vdf))
if not await self._can_accept_compact_proof(
request.vdf_info, request.vdf_proof, request.height, request.header_hash, field_vdf
@@ -2421,6 +2421,10 @@ async def add_compact_proof_of_time(self, request: timelord_protocol.RespondComp
await self.server.send_to_all([msg], NodeType.FULL_NODE)
async def new_compact_vdf(self, request: full_node_protocol.NewCompactVDF, peer: WSChiaConnection) -> None:
+ peak = self.blockchain.get_peak()
+ if peak is None or peak.height - request.height < 5:
+ self.log.info(f"Ignoring new_compact_vdf, height {request.height} too recent.")
+ return None
is_fully_compactified = await self.block_store.is_fully_compactified(request.header_hash)
if is_fully_compactified is None or is_fully_compactified:
return None
diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py
--- a/chia/full_node/full_node_api.py
+++ b/chia/full_node/full_node_api.py
@@ -94,7 +94,7 @@ async def request_peers(
) -> Optional[Message]:
if peer.peer_server_port is None:
return None
- peer_info = PeerInfo(peer.peer_host, peer.peer_server_port)
+ peer_info = PeerInfo(peer.peer_info.host, peer.peer_server_port)
if self.full_node.full_node_peers is not None:
msg = await self.full_node.full_node_peers.request_peers(peer_info)
return msg
@@ -132,6 +132,7 @@ async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnec
async with self.full_node.new_peak_sem.acquire():
await self.full_node.new_peak(request, peer)
except LimitedSemaphoreFullError:
+ self.log.debug("Ignoring NewPeak, limited semaphore full: %s %s", peer.get_peer_logging(), request)
return None
return None
@@ -754,9 +755,8 @@ async def declare_proof_of_space(
self.full_node.log.error(f"Error making spend bundle {e} peak: {peak}")
mempool_bundle = None
if mempool_bundle is not None:
- spend_bundle = mempool_bundle[0]
- additions = mempool_bundle[1]
- removals = mempool_bundle[2]
+ spend_bundle, additions = mempool_bundle
+ removals = spend_bundle.removals()
self.full_node.log.info(f"Add rem: {len(additions)} {len(removals)}")
aggregate_signature = spend_bundle.aggregated_signature
if self.full_node.full_node_store.previous_generator is not None:
@@ -1302,17 +1302,13 @@ async def request_puzzle_solution(self, request: wallet_protocol.RequestPuzzleSo
block_generator: Optional[BlockGenerator] = await self.full_node.blockchain.get_block_generator(block)
assert block_generator is not None
- error, puzzle, solution = await asyncio.get_running_loop().run_in_executor(
- self.executor, get_puzzle_and_solution_for_coin, block_generator, coin_record.coin
- )
-
- if error is not None:
+ try:
+ spend_info = await asyncio.get_running_loop().run_in_executor(
+ self.executor, get_puzzle_and_solution_for_coin, block_generator, coin_record.coin
+ )
+ except ValueError:
return reject_msg
-
- assert puzzle is not None
- assert solution is not None
-
- wrapper = PuzzleSolutionResponse(coin_name, height, puzzle, solution)
+ wrapper = PuzzleSolutionResponse(coin_name, height, spend_info.puzzle, spend_info.solution)
response = wallet_protocol.RespondPuzzleSolution(wrapper)
response_msg = make_msg(ProtocolMessageTypes.respond_puzzle_solution, response)
return response_msg
@@ -1401,11 +1397,30 @@ async def request_header_blocks(self, request: wallet_protocol.RequestHeaderBloc
)
return msg
- @api_request()
- async def respond_compact_proof_of_time(self, request: timelord_protocol.RespondCompactProofOfTime) -> None:
+ @api_request(bytes_required=True, execute_task=True)
+ async def respond_compact_proof_of_time(
+ self, request: timelord_protocol.RespondCompactProofOfTime, request_bytes: bytes = b""
+ ) -> None:
if self.full_node.sync_store.get_sync_mode():
return None
- await self.full_node.add_compact_proof_of_time(request)
+ name = std_hash(request_bytes)
+ if name in self.full_node.compact_vdf_requests:
+ self.log.debug(f"Ignoring CompactProofOfTime: {request}, already requested")
+ return None
+
+ self.full_node.compact_vdf_requests.add(name)
+
+ # this semaphore will only allow a limited number of tasks call
+ # new_compact_vdf() at a time, since it can be expensive
+ try:
+ async with self.full_node.compact_vdf_sem.acquire():
+ try:
+ await self.full_node.add_compact_proof_of_time(request)
+ finally:
+ self.full_node.compact_vdf_requests.remove(name)
+ except LimitedSemaphoreFullError:
+ self.log.debug(f"Ignoring CompactProofOfTime, limited semaphore full: {request}")
+
return None
@api_request(peer_required=True, bytes_required=True, execute_task=True)
@@ -1417,7 +1432,7 @@ async def new_compact_vdf(
name = std_hash(request_bytes)
if name in self.full_node.compact_vdf_requests:
- self.log.debug(f"Ignoring NewCompactVDF: {request}, already requested")
+ self.log.debug("Ignoring NewCompactVDF, already requested: %s %s", peer.get_peer_logging(), request)
return None
self.full_node.compact_vdf_requests.add(name)
@@ -1430,7 +1445,7 @@ async def new_compact_vdf(
finally:
self.full_node.compact_vdf_requests.remove(name)
except LimitedSemaphoreFullError:
- self.log.debug(f"Ignoring NewCompactVDF: {request}, _waiters")
+ self.log.debug("Ignoring NewCompactVDF, limited semaphore full: %s %s", peer.get_peer_logging(), request)
return None
return None
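The reworked respond_compact_proof_of_time combines two guards that new_compact_vdf already used: a set of request hashes so a proof received twice is only processed once, and a limited semaphore so only a few expensive validations run at a time. Reduced to its shape with plain asyncio primitives (chia's LimitedSemaphore raises instead of queueing when full, which this simplified sketch does not reproduce):

    import asyncio
    import hashlib
    from typing import Set

    _seen: Set[bytes] = set()
    _slots = asyncio.Semaphore(4)  # stand-in for the node's limited semaphore


    async def handle_once(request_bytes: bytes) -> None:
        name = hashlib.sha256(request_bytes).digest()
        if name in _seen:
            return  # duplicate request, drop it
        _seen.add(name)
        try:
            async with _slots:  # bound how many expensive validations run at once
                await asyncio.sleep(0)  # placeholder for the real validation work
        finally:
            _seen.discard(name)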
diff --git a/chia/full_node/generator.py b/chia/full_node/generator.py
--- a/chia/full_node/generator.py
+++ b/chia/full_node/generator.py
@@ -1,26 +1,23 @@
from __future__ import annotations
import logging
-from typing import List, Optional, Tuple, Union
+from typing import List, Optional, Union
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.generator_types import BlockGenerator, CompressorArg, GeneratorBlockCacheInterface
from chia.util.ints import uint32
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
-GENERATOR_MOD = get_generator()
-
-DECOMPRESS_BLOCK = load_clvm_maybe_recompile("block_program_zero.clvm", package_or_requirement="chia.wallet.puzzles")
-DECOMPRESS_PUZZLE = load_clvm_maybe_recompile("decompress_puzzle.clvm", package_or_requirement="chia.wallet.puzzles")
+DECOMPRESS_BLOCK = load_clvm_maybe_recompile("block_program_zero.clsp", package_or_requirement="chia.wallet.puzzles")
+DECOMPRESS_PUZZLE = load_clvm_maybe_recompile("decompress_puzzle.clsp", package_or_requirement="chia.wallet.puzzles")
# DECOMPRESS_CSE = load_clvm_maybe_recompile(
-# "decompress_coin_spend_entry.clvm",
+# "decompress_coin_spend_entry.clsp",
# package_or_requirement="chia.wallet.puzzles",
# )
DECOMPRESS_CSE_WITH_PREFIX = load_clvm_maybe_recompile(
- "decompress_coin_spend_entry_with_prefix.clvm", package_or_requirement="chia.wallet.puzzles"
+ "decompress_coin_spend_entry_with_prefix.clsp", package_or_requirement="chia.wallet.puzzles"
)
log = logging.getLogger(__name__)
@@ -41,15 +38,6 @@ def create_block_generator(
return BlockGenerator(generator, generator_list, generator_heights)
-def create_generator_args(generator_ref_list: List[SerializedProgram]) -> Program:
- """
- `create_generator_args`: The format and contents of these arguments affect consensus.
- """
- gen_ref_list = [bytes(g) for g in generator_ref_list]
- ret: Program = Program.to([gen_ref_list])
- return ret
-
-
def create_compressed_generator(
original_generator: CompressorArg,
compressed_cse_list: List[List[List[Union[bytes, None, int, Program]]]],
@@ -64,19 +52,3 @@ def create_compressed_generator(
DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, Program.to(start), Program.to(end), compressed_cse_list
)
return BlockGenerator(program, [original_generator.generator], [original_generator.block_height])
-
-
-def setup_generator_args(self: BlockGenerator) -> Tuple[SerializedProgram, Program]:
- args = create_generator_args(self.generator_refs)
- return self.program, args
-
-
-def run_generator_mempool(self: BlockGenerator, max_cost: int) -> Tuple[int, SerializedProgram]:
- program, args = setup_generator_args(self)
- return GENERATOR_MOD.run_mempool_with_cost(max_cost, program, args)
-
-
-def run_generator_unsafe(self: BlockGenerator, max_cost: int) -> Tuple[int, SerializedProgram]:
- """This mode is meant for accepting possibly soft-forked transactions into the mempool"""
- program, args = setup_generator_args(self)
- return GENERATOR_MOD.run_with_cost(max_cost, program, args)
diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py
--- a/chia/full_node/mempool.py
+++ b/chia/full_node/mempool.py
@@ -1,29 +1,36 @@
from __future__ import annotations
+import logging
import sqlite3
-from dataclasses import dataclass
from datetime import datetime
from enum import Enum
-from typing import Dict, Iterator, List, Optional
+from typing import Callable, Dict, Iterator, List, Optional, Tuple
-from chia.consensus.cost_calculator import NPCResult
+from blspy import AugSchemeMPL, G2Element
+from chia_rs import Coin
+
+from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.full_node.fee_estimation import FeeMempoolInfo, MempoolInfo, MempoolItemInfo
from chia.full_node.fee_estimator_interface import FeeEstimatorInterface
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.clvm_cost import CLVMCost
+from chia.types.coin_spend import CoinSpend
+from chia.types.eligible_coin_spends import EligibleCoinSpends
+from chia.types.internal_mempool_item import InternalMempoolItem
from chia.types.mempool_item import MempoolItem
from chia.types.spend_bundle import SpendBundle
from chia.util.chunks import chunks
from chia.util.db_wrapper import SQLITE_MAX_VARIABLE_NUMBER
+from chia.util.errors import Err
from chia.util.ints import uint32, uint64
+log = logging.getLogger(__name__)
+
# We impose a limit on the fee a single transaction can pay in order to have the
# sum of all fees in the mempool be less than 2^63. That's the limit of sqlite's
# integers, which we rely on for computing fee per cost as well as the fee sum
MEMPOOL_ITEM_FEE_LIMIT = 2**50
-SQLITE_NO_GENERATED_COLUMNS: bool = sqlite3.sqlite_version_info < (3, 31, 0)
-
class MempoolRemoveReason(Enum):
CONFLICT = 1
@@ -32,49 +39,49 @@ class MempoolRemoveReason(Enum):
EXPIRED = 4
-@dataclass(frozen=True)
-class InternalMempoolItem:
- spend_bundle: SpendBundle
- npc_result: NPCResult
- height_added_to_mempool: uint32
-
-
class Mempool:
_db_conn: sqlite3.Connection
# it's expensive to serialize and deserialize G2Element, so we keep those in
# this separate dictionary
_items: Dict[bytes32, InternalMempoolItem]
+ # the most recent block height and timestamp that we know of
+ _block_height: uint32
+ _timestamp: uint64
+
def __init__(self, mempool_info: MempoolInfo, fee_estimator: FeeEstimatorInterface):
self._db_conn = sqlite3.connect(":memory:")
self._items = {}
+ self._block_height = uint32(0)
+ self._timestamp = uint64(0)
with self._db_conn:
# name means SpendBundle hash
# assert_height may be NIL
- generated = ""
- if not SQLITE_NO_GENERATED_COLUMNS:
- generated = " GENERATED ALWAYS AS (fee / cost) VIRTUAL"
-
+ # the seq field indicates the order of items being added to the
+ # mempool. It's used as a tie-breaker for items with the same fee
+ # rate
+ # TODO: In the future, for the "fee_per_cost" field, opt for
+ # "GENERATED ALWAYS AS (CAST(fee AS REAL) / cost) VIRTUAL"
self._db_conn.execute(
- f"""CREATE TABLE tx(
- name BLOB PRIMARY KEY,
+ """CREATE TABLE tx(
+ name BLOB,
cost INT NOT NULL,
fee INT NOT NULL,
assert_height INT,
assert_before_height INT,
assert_before_seconds INT,
- fee_per_cost REAL{generated})
+ fee_per_cost REAL,
+ seq INTEGER PRIMARY KEY AUTOINCREMENT)
"""
)
+ self._db_conn.execute("CREATE INDEX name_idx ON tx(name)")
self._db_conn.execute("CREATE INDEX fee_sum ON tx(fee)")
self._db_conn.execute("CREATE INDEX cost_sum ON tx(cost)")
self._db_conn.execute("CREATE INDEX feerate ON tx(fee_per_cost)")
self._db_conn.execute(
- "CREATE INDEX assert_before_height ON tx(assert_before_height) WHERE assert_before_height != NULL"
- )
- self._db_conn.execute(
- "CREATE INDEX assert_before_seconds ON tx(assert_before_seconds) WHERE assert_before_seconds != NULL"
+ "CREATE INDEX assert_before ON tx(assert_before_height, assert_before_seconds) "
+ "WHERE assert_before_height IS NOT NULL OR assert_before_seconds IS NOT NULL"
)
# This table maps coin IDs to spend bundles hashes
@@ -96,12 +103,22 @@ def __del__(self) -> None:
def _row_to_item(self, row: sqlite3.Row) -> MempoolItem:
name = bytes32(row[0])
- fee = int(row[1])
- assert_height = row[2]
+ fee = int(row[2])
+ assert_height = row[3]
+ assert_before_height = row[4]
+ assert_before_seconds = row[5]
item = self._items[name]
return MempoolItem(
- item.spend_bundle, uint64(fee), item.npc_result, name, uint32(item.height_added_to_mempool), assert_height
+ item.spend_bundle,
+ uint64(fee),
+ item.npc_result,
+ name,
+ uint32(item.height_added_to_mempool),
+ assert_height,
+ assert_before_height,
+ assert_before_seconds,
+ bundle_coin_spends=item.bundle_coin_spends,
)
def total_mempool_fees(self) -> int:
@@ -116,22 +133,22 @@ def total_mempool_cost(self) -> CLVMCost:
val = cursor.fetchone()[0]
return CLVMCost(uint64(0) if val is None else uint64(val))
- def all_spends(self) -> Iterator[MempoolItem]:
+ def all_items(self) -> Iterator[MempoolItem]:
with self._db_conn:
- cursor = self._db_conn.execute("SELECT name, fee, assert_height FROM tx")
+ cursor = self._db_conn.execute("SELECT * FROM tx")
for row in cursor:
yield self._row_to_item(row)
- def all_spend_ids(self) -> List[bytes32]:
+ def all_item_ids(self) -> List[bytes32]:
with self._db_conn:
cursor = self._db_conn.execute("SELECT name FROM tx")
return [bytes32(row[0]) for row in cursor]
# TODO: move "process_mempool_items()" into this class in order to do this a
# bit more efficiently
- def spends_by_feerate(self) -> Iterator[MempoolItem]:
+ def items_by_feerate(self) -> Iterator[MempoolItem]:
with self._db_conn:
- cursor = self._db_conn.execute("SELECT name, fee, assert_height FROM tx ORDER BY fee_per_cost DESC")
+ cursor = self._db_conn.execute("SELECT * FROM tx ORDER BY fee_per_cost DESC, seq ASC")
for row in cursor:
yield self._row_to_item(row)
@@ -141,21 +158,32 @@ def size(self) -> int:
val = cursor.fetchone()
return 0 if val is None else int(val[0])
- def get_spend_by_id(self, spend_bundle_id: bytes32) -> Optional[MempoolItem]:
+ def get_item_by_id(self, item_id: bytes32) -> Optional[MempoolItem]:
with self._db_conn:
- cursor = self._db_conn.execute("SELECT name, fee, assert_height FROM tx WHERE name=?", (spend_bundle_id,))
+ cursor = self._db_conn.execute("SELECT * FROM tx WHERE name=?", (item_id,))
row = cursor.fetchone()
return None if row is None else self._row_to_item(row)
# TODO: we need a bulk lookup function like this too
- def get_spends_by_coin_id(self, spent_coin_id: bytes32) -> List[MempoolItem]:
+ def get_items_by_coin_id(self, spent_coin_id: bytes32) -> List[MempoolItem]:
with self._db_conn:
cursor = self._db_conn.execute(
- "SELECT name, fee, assert_height FROM tx WHERE name in (SELECT tx FROM spends WHERE coin_id=?)",
+ "SELECT * FROM tx WHERE name in (SELECT tx FROM spends WHERE coin_id=?)",
(spent_coin_id,),
)
return [self._row_to_item(row) for row in cursor]
+ def get_items_by_coin_ids(self, spent_coin_ids: List[bytes32]) -> List[MempoolItem]:
+ items: List[MempoolItem] = []
+ for coin_ids in chunks(spent_coin_ids, SQLITE_MAX_VARIABLE_NUMBER):
+ args = ",".join(["?"] * len(coin_ids))
+ with self._db_conn:
+ cursor = self._db_conn.execute(
+ f"SELECT * FROM tx WHERE name IN (SELECT tx FROM spends WHERE coin_id IN ({args}))", tuple(coin_ids)
+ )
+ items.extend(self._row_to_item(row) for row in cursor)
+ return items
+
def get_min_fee_rate(self, cost: int) -> float:
"""
Gets the minimum fpc rate that a transaction with specified cost will need in order to get included.
@@ -167,7 +195,7 @@ def get_min_fee_rate(self, cost: int) -> float:
# Iterates through all spends in increasing fee per cost
with self._db_conn:
- cursor = self._db_conn.execute("SELECT cost,fee_per_cost FROM tx ORDER BY fee_per_cost ASC")
+ cursor = self._db_conn.execute("SELECT cost,fee_per_cost FROM tx ORDER BY fee_per_cost ASC, seq DESC")
item_cost: int
fee_per_cost: float
@@ -197,6 +225,8 @@ def new_tx_block(self, block_height: uint32, timestamp: uint64) -> None:
to_remove = [bytes32(row[0]) for row in cursor]
self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED)
+ self._block_height = block_height
+ self._timestamp = timestamp
def remove_from_pool(self, items: List[bytes32], reason: MempoolRemoveReason) -> None:
"""
@@ -207,7 +237,6 @@ def remove_from_pool(self, items: List[bytes32], reason: MempoolRemoveReason) ->
removed_items: List[MempoolItemInfo] = []
if reason != MempoolRemoveReason.BLOCK_INCLUSION:
-
for spend_bundle_ids in chunks(items, SQLITE_MAX_VARIABLE_NUMBER):
args = ",".join(["?"] * len(spend_bundle_ids))
with self._db_conn:
@@ -236,59 +265,98 @@ def remove_from_pool(self, items: List[bytes32], reason: MempoolRemoveReason) ->
for iteminfo in removed_items:
self.fee_estimator.remove_mempool_item(info, iteminfo)
- def add_to_pool(self, item: MempoolItem) -> None:
+ def add_to_pool(self, item: MempoolItem) -> Optional[Err]:
"""
Adds an item to the mempool by kicking out transactions (if it doesn't fit), in order of increasing fee per cost
"""
assert item.fee < MEMPOOL_ITEM_FEE_LIMIT
assert item.npc_result.conds is not None
+ assert item.cost <= self.mempool_info.max_block_clvm_cost
- # TODO: this block could be simplified by removing all items in a single
- # SQL query. Or at least figure out which items to remove and then
- # remove them all in a single call to remove_from_pool()
with self._db_conn:
- while self.at_full_capacity(item.cost):
- # pick the item with the lowest fee per cost to remove
- cursor = self._db_conn.execute("SELECT name FROM tx ORDER BY fee_per_cost ASC LIMIT 1")
- name = bytes32(cursor.fetchone()[0])
- self.remove_from_pool([name], MempoolRemoveReason.POOL_FULL)
-
- if SQLITE_NO_GENERATED_COLUMNS:
- self._db_conn.execute(
- "INSERT INTO tx VALUES(?, ?, ?, ?, ?, ?, ?)",
- (
- item.name,
- item.cost,
- item.fee,
- item.assert_height,
- item.assert_before_height,
- item.assert_before_seconds,
- item.fee / item.cost,
- ),
+ # we have certain limits on transactions that will expire soon
+ # (in the next 15 minutes)
+ block_cutoff = self._block_height + 48
+ time_cutoff = self._timestamp + 900
+ if (item.assert_before_height is not None and item.assert_before_height < block_cutoff) or (
+ item.assert_before_seconds is not None and item.assert_before_seconds < time_cutoff
+ ):
+ # this lists only the soon-expiring transactions, lowest fee rate
+ # first, together with the running total of their cost accumulated
+ # from the highest fee rate down to each row
+ cursor = self._db_conn.execute(
+ """
+ SELECT name,
+ fee_per_cost,
+ SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS cumulative_cost
+ FROM tx
+ WHERE assert_before_seconds IS NOT NULL AND assert_before_seconds < ?
+ OR assert_before_height IS NOT NULL AND assert_before_height < ?
+ ORDER BY cumulative_cost DESC
+ """,
+ (time_cutoff, block_cutoff),
)
- else:
- self._db_conn.execute(
- "INSERT INTO tx VALUES(?, ?, ?, ?, ?, ?)",
- (
- item.name,
- item.cost,
- item.fee,
- item.assert_height,
- item.assert_before_height,
- item.assert_before_seconds,
- ),
+ to_remove: List[bytes32] = []
+ for row in cursor:
+ name, fee_per_cost, cumulative_cost = row
+
+ # there's space for us, stop pruning
+ if cumulative_cost + item.cost <= self.mempool_info.max_block_clvm_cost:
+ break
+
+ # we can't evict any more transactions, abort (and don't
+ # evict what we put aside in "to_remove" list)
+ if fee_per_cost > item.fee_per_cost:
+ return Err.INVALID_FEE_LOW_FEE
+ to_remove.append(name)
+ self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED)
+ # if we don't find any entries, it's OK to add this entry
+
+ total_cost = int(self.total_mempool_cost())
+ if total_cost + item.cost > self.mempool_info.max_size_in_cost:
+ # pick the items with the lowest fee per cost to remove
+ cursor = self._db_conn.execute(
+ """SELECT name FROM tx
+ WHERE name NOT IN (
+ SELECT name FROM (
+ SELECT name,
+ SUM(cost) OVER (ORDER BY fee_per_cost DESC, seq ASC) AS total_cost
+ FROM tx) AS tx_with_cost
+ WHERE total_cost <= ?)
+ """,
+ (self.mempool_info.max_size_in_cost - item.cost,),
)
+ to_remove = [bytes32(row[0]) for row in cursor]
+ self.remove_from_pool(to_remove, MempoolRemoveReason.POOL_FULL)
+
+ # TODO: In the future, for the "fee_per_cost" field, opt for
+ # "GENERATED ALWAYS AS (CAST(fee AS REAL) / cost) VIRTUAL"
+ self._db_conn.execute(
+ "INSERT INTO "
+ "tx(name,cost,fee,assert_height,assert_before_height,assert_before_seconds,fee_per_cost) "
+ "VALUES(?, ?, ?, ?, ?, ?, ?)",
+ (
+ item.name,
+ item.cost,
+ item.fee,
+ item.assert_height,
+ item.assert_before_height,
+ item.assert_before_seconds,
+ item.fee / item.cost,
+ ),
+ )
all_coin_spends = [(s.coin_id, item.name) for s in item.npc_result.conds.spends]
self._db_conn.executemany("INSERT INTO spends VALUES(?, ?)", all_coin_spends)
self._items[item.name] = InternalMempoolItem(
- item.spend_bundle, item.npc_result, item.height_added_to_mempool
+ item.spend_bundle, item.npc_result, item.height_added_to_mempool, item.bundle_coin_spends
)
info = FeeMempoolInfo(self.mempool_info, self.total_mempool_cost(), self.total_mempool_fees(), datetime.now())
self.fee_estimator.add_mempool_item(info, MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool))
+ return None
def at_full_capacity(self, cost: int) -> bool:
"""
@@ -296,3 +364,56 @@ def at_full_capacity(self, cost: int) -> bool:
"""
return self.total_mempool_cost() + cost > self.mempool_info.max_size_in_cost
+
+ def create_bundle_from_mempool_items(
+ self, item_inclusion_filter: Callable[[bytes32], bool]
+ ) -> Optional[Tuple[SpendBundle, List[Coin]]]:
+ cost_sum = 0 # Checks that total cost does not exceed block maximum
+ fee_sum = 0 # Checks that total fees don't exceed 64 bits
+ processed_spend_bundles = 0
+ additions: List[Coin] = []
+ # This contains a map of coin ID to a coin spend solution and its isolated cost
+ # We reconstruct it for every bundle we create from mempool items because we
+ # deduplicate on the first coin spend solution that comes with the highest
+ # fee rate item, and that can change across calls
+ eligible_coin_spends = EligibleCoinSpends()
+ coin_spends: List[CoinSpend] = []
+ sigs: List[G2Element] = []
+ log.info(f"Starting to make block, max cost: {self.mempool_info.max_block_clvm_cost}")
+ with self._db_conn:
+ cursor = self._db_conn.execute("SELECT name, fee FROM tx ORDER BY fee_per_cost DESC, seq ASC")
+ for row in cursor:
+ name = bytes32(row[0])
+ fee = int(row[1])
+ item = self._items[name]
+ if not item_inclusion_filter(name):
+ continue
+ try:
+ unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=item.bundle_coin_spends, max_cost=item.npc_result.cost
+ )
+ item_cost = item.npc_result.cost - cost_saving
+ log.info("Cumulative cost: %d, fee per cost: %0.4f", cost_sum, fee / item_cost)
+ if (
+ item_cost + cost_sum > self.mempool_info.max_block_clvm_cost
+ or fee + fee_sum > DEFAULT_CONSTANTS.MAX_COIN_AMOUNT
+ ):
+ break
+ coin_spends.extend(unique_coin_spends)
+ additions.extend(unique_additions)
+ sigs.append(item.spend_bundle.aggregated_signature)
+ cost_sum += item_cost
+ fee_sum += fee
+ processed_spend_bundles += 1
+ except Exception as e:
+ log.debug(f"Exception while checking a mempool item for deduplication: {e}")
+ continue
+ if processed_spend_bundles == 0:
+ return None
+ log.info(
+ f"Cumulative cost of block (real cost should be less) {cost_sum}. Proportion "
+ f"full: {cost_sum / self.mempool_info.max_block_clvm_cost}"
+ )
+ aggregated_signature = AugSchemeMPL.aggregate(sigs)
+ agg = SpendBundle(coin_spends, aggregated_signature)
+ return agg, additions
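
The two new queries above apply the same ranking twice: order mempool items by fee per cost (highest first, ties broken by insertion sequence), keep a running sum of cost, and evict or skip whatever falls outside the cost budget. The following self-contained sketch reproduces that policy with plain Python lists instead of the SQLite tables and window function used in the patch; the MempoolEntry type and the numbers are illustrative only, not part of the change.

from dataclasses import dataclass
from typing import List

@dataclass
class MempoolEntry:
    name: str
    cost: int
    fee: int

    @property
    def fee_per_cost(self) -> float:
        return self.fee / self.cost

def names_to_evict(entries: List[MempoolEntry], max_total_cost: int) -> List[str]:
    # Rank by fee per cost, highest first; the SQL version also breaks ties by
    # the "seq" insertion order, which sorted() preserves because it is stable.
    ranked = sorted(entries, key=lambda e: e.fee_per_cost, reverse=True)
    kept_cost = 0
    evict: List[str] = []
    for entry in ranked:
        if kept_cost + entry.cost <= max_total_cost:
            kept_cost += entry.cost   # still inside the budget, keep it
        else:
            evict.append(entry.name)  # outside the budget, evict it
    return evict

if __name__ == "__main__":
    pool = [
        MempoolEntry("a", cost=400, fee=4000),
        MempoolEntry("b", cost=300, fee=600),
        MempoolEntry("c", cost=500, fee=250),
    ]
    # With a 700-cost budget only "a" and "b" fit; "c" has the lowest fee per cost.
    print(names_to_evict(pool, max_total_cost=700))  # ['c']
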
diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py
--- a/chia/full_node/mempool_check_conditions.py
+++ b/chia/full_node/mempool_check_conditions.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import logging
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional
from chia_rs import ENABLE_ASSERT_BEFORE, LIMIT_STACK, MEMPOOL_MODE, NO_RELATIVE_CONDITIONS_ON_EPHEMERAL
from chia_rs import get_puzzle_and_solution_for_coin as get_puzzle_and_solution_for_coin_rust
@@ -16,18 +16,15 @@
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
-from chia.types.coin_spend import CoinSpend
+from chia.types.coin_spend import CoinSpend, SpendInfo
from chia.types.generator_types import BlockGenerator
from chia.types.spend_bundle_conditions import SpendBundleConditions
from chia.util.errors import Err
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.puzzles.load_clvm import load_serialized_clvm_maybe_recompile
-from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
-
-GENERATOR_MOD = get_generator()
DESERIALIZE_MOD = load_serialized_clvm_maybe_recompile(
- "chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles"
+ "chialisp_deserialisation.clsp", package_or_requirement="chia.wallet.puzzles"
)
log = logging.getLogger(__name__)
@@ -41,13 +38,10 @@ def get_name_puzzle_conditions(
height: uint32,
constants: ConsensusConstants = DEFAULT_CONSTANTS,
) -> NPCResult:
-
if mempool_mode:
flags = MEMPOOL_MODE
- elif height >= constants.SOFT_FORK_HEIGHT:
- flags = LIMIT_STACK
else:
- flags = 0
+ flags = LIMIT_STACK
if height >= constants.SOFT_FORK2_HEIGHT:
flags = flags | ENABLE_ASSERT_BEFORE | NO_RELATIVE_CONDITIONS_ON_EPHEMERAL
@@ -66,9 +60,7 @@ def get_name_puzzle_conditions(
return NPCResult(uint16(Err.GENERATOR_RUNTIME_ERROR.value), None, uint64(0))
-def get_puzzle_and_solution_for_coin(
- generator: BlockGenerator, coin: Coin
-) -> Tuple[Optional[Exception], Optional[SerializedProgram], Optional[SerializedProgram]]:
+def get_puzzle_and_solution_for_coin(generator: BlockGenerator, coin: Coin) -> SpendInfo:
try:
args = bytearray(b"\xff")
args += bytes(DESERIALIZE_MOD)
@@ -84,10 +76,9 @@ def get_puzzle_and_solution_for_coin(
coin.amount,
coin.puzzle_hash,
)
-
- return None, SerializedProgram.from_bytes(puzzle), SerializedProgram.from_bytes(solution)
+ return SpendInfo(SerializedProgram.from_bytes(puzzle), SerializedProgram.from_bytes(solution))
except Exception as e:
- return e, None, None
+ raise ValueError(f"Failed to get puzzle and solution for coin {coin}, error: {e}") from e
def get_spends_for_block(generator: BlockGenerator) -> List[CoinSpend]:
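
get_puzzle_and_solution_for_coin now returns a SpendInfo and raises ValueError on failure instead of handing back an (error, puzzle, solution) tuple, so callers move from checking the error slot to try/except (the full-node RPC handler later in this patch is one such caller). Below is a small, self-contained sketch of that before/after calling convention using a toy stand-in type rather than the real chia classes.

from dataclasses import dataclass

@dataclass(frozen=True)
class SpendInfoSketch:
    # Stand-in for a puzzle/solution pair; not the real chia.types.coin_spend.SpendInfo.
    puzzle: bytes
    solution: bytes

def lookup_spend(coin_id: str) -> SpendInfoSketch:
    # Raises instead of returning (error, None, None) the way the old API did.
    known = {"coin-1": SpendInfoSketch(b"\x01", b"\x02")}
    if coin_id not in known:
        raise ValueError(f"Failed to get puzzle and solution for coin {coin_id}")
    return known[coin_id]

if __name__ == "__main__":
    spend_info = lookup_spend("coin-1")        # success path: just use the fields
    print(spend_info.puzzle, spend_info.solution)
    try:
        lookup_spend("coin-2")                 # failure path: handle the exception
    except ValueError as e:
        print("caller handles the error:", e)
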
diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py
--- a/chia/full_node/mempool_manager.py
+++ b/chia/full_node/mempool_manager.py
@@ -10,6 +10,7 @@
from typing import Awaitable, Callable, Dict, List, Optional, Set, Tuple, TypeVar
from blspy import GTElement
+from chia_rs import ELIGIBLE_FOR_DEDUP
from chiabip158 import PyBIP158
from chia.consensus.block_record import BlockRecordProtocol
@@ -28,7 +29,7 @@
from chia.types.coin_record import CoinRecord
from chia.types.fee_rate import FeeRate
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
-from chia.types.mempool_item import MempoolItem
+from chia.types.mempool_item import BundleCoinSpend, MempoolItem
from chia.types.spend_bundle import SpendBundle
from chia.types.spend_bundle_conditions import SpendBundleConditions
from chia.util import cached_bls
@@ -36,7 +37,6 @@
from chia.util.condition_tools import pkm_pairs
from chia.util.db_wrapper import SQLITE_INT_MAX
from chia.util.errors import Err, ValidationError
-from chia.util.generator_tools import additions_for_npc
from chia.util.inline_executor import InlineExecutor
from chia.util.ints import uint32, uint64
from chia.util.lru_cache import LRUCache
@@ -76,7 +76,7 @@ def validate_clvm_and_signature(
pks: List[bytes48] = []
msgs: List[bytes] = []
assert result.conds is not None
- pks, msgs = pkm_pairs(result.conds, additional_data, soft_fork=True)
+ pks, msgs = pkm_pairs(result.conds, additional_data)
# Verify aggregated signature
cache: LRUCache[bytes32, GTElement] = LRUCache(10000)
@@ -209,65 +209,27 @@ def __init__(
def shut_down(self) -> None:
self.pool.shutdown(wait=True)
- def process_mempool_items(
- self, item_inclusion_filter: Callable[[bytes32], bool]
- ) -> Tuple[List[SpendBundle], uint64, List[Coin], List[Coin]]:
- cost_sum = 0 # Checks that total cost does not exceed block maximum
- fee_sum = 0 # Checks that total fees don't exceed 64 bits
- spend_bundles: List[SpendBundle] = []
- removals: List[Coin] = []
- additions: List[Coin] = []
- for item in self.mempool.spends_by_feerate():
- if not item_inclusion_filter(item.name):
- continue
- log.info("Cumulative cost: %d, fee per cost: %0.4f", cost_sum, item.fee_per_cost)
- if item.cost + cost_sum > self.max_block_clvm_cost or item.fee + fee_sum > self.constants.MAX_COIN_AMOUNT:
- return (spend_bundles, uint64(cost_sum), additions, removals)
- spend_bundles.append(item.spend_bundle)
- cost_sum += item.cost
- fee_sum += item.fee
- removals.extend(item.removals)
- if item.npc_result.conds is not None:
- for spend in item.npc_result.conds.spends:
- for puzzle_hash, amount, _ in spend.create_coin:
- coin = Coin(spend.coin_id, puzzle_hash, amount)
- additions.append(coin)
- return (spend_bundles, uint64(cost_sum), additions, removals)
-
def create_bundle_from_mempool(
- self,
- last_tb_header_hash: bytes32,
- item_inclusion_filter: Optional[Callable[[bytes32], bool]] = None,
- ) -> Optional[Tuple[SpendBundle, List[Coin], List[Coin]]]:
+ self, last_tb_header_hash: bytes32, item_inclusion_filter: Optional[Callable[[bytes32], bool]] = None
+ ) -> Optional[Tuple[SpendBundle, List[Coin]]]:
"""
Returns aggregated spendbundle that can be used for creating new block,
additions and removals in that spend_bundle
"""
if self.peak is None or self.peak.header_hash != last_tb_header_hash:
return None
-
if item_inclusion_filter is None:
def always(bundle_name: bytes32) -> bool:
return True
item_inclusion_filter = always
-
- log.info(f"Starting to make block, max cost: {self.max_block_clvm_cost}")
- spend_bundles, cost_sum, additions, removals = self.process_mempool_items(item_inclusion_filter)
- if len(spend_bundles) == 0:
- return None
- log.info(
- f"Cumulative cost of block (real cost should be less) {cost_sum}. Proportion "
- f"full: {cost_sum / self.max_block_clvm_cost}"
- )
- agg = SpendBundle.aggregate(spend_bundles)
- return agg, additions, removals
+ return self.mempool.create_bundle_from_mempool_items(item_inclusion_filter)
def get_filter(self) -> bytes:
all_transactions: Set[bytes32] = set()
byte_array_list = []
- for key in self.mempool.all_spend_ids():
+ for key in self.mempool.all_item_ids():
if key not in all_transactions:
all_transactions.add(key)
byte_array_list.append(bytearray(key))
@@ -362,7 +324,7 @@ async def add_spend_bundle(
"""
# Skip if already added
- existing_item = self.mempool.get_spend_by_id(spend_name)
+ existing_item = self.mempool.get_item_by_id(spend_name)
if existing_item is not None:
return existing_item.cost, MempoolInclusionStatus.SUCCESS, None
@@ -373,7 +335,9 @@ async def add_spend_bundle(
# No error, immediately add to mempool, after removing conflicting TXs.
assert item is not None
self.mempool.remove_from_pool(remove_items, MempoolRemoveReason.CONFLICT)
- self.mempool.add_to_pool(item)
+ err = self.mempool.add_to_pool(item)
+ if err is not None:
+ return item.cost, MempoolInclusionStatus.FAILED, err
return item.cost, MempoolInclusionStatus.SUCCESS, None
elif err is Err.MEMPOOL_CONFLICT and item is not None:
# The transaction has a conflict with another item in the
@@ -425,18 +389,35 @@ async def validate_spend_bundle(
log.debug(f"Cost: {cost}")
assert npc_result.conds is not None
- # build set of removals
- removal_names: Set[bytes32] = set(bytes32(spend.coin_id) for spend in npc_result.conds.spends)
- if removal_names != set(s.name() for s in new_spend.removals()):
- # If you reach here it's probably because your program reveal doesn't match the coin's puzzle hash
- return Err.INVALID_SPEND_BUNDLE, None, []
-
- additions: List[Coin] = additions_for_npc(npc_result)
+ removal_names: Set[bytes32] = set()
additions_dict: Dict[bytes32, Coin] = {}
addition_amount: int = 0
- for add in additions:
- additions_dict[add.name()] = add
- addition_amount = addition_amount + add.amount
+ eligibility_and_additions: Dict[bytes32, Tuple[bool, List[Coin]]] = {}
+ non_eligible_coin_ids: List[bytes32] = []
+ for spend in npc_result.conds.spends:
+ coin_id = bytes32(spend.coin_id)
+ removal_names.add(coin_id)
+ spend_additions = []
+ for puzzle_hash, amount, _ in spend.create_coin:
+ child_coin = Coin(coin_id, puzzle_hash, amount)
+ spend_additions.append(child_coin)
+ additions_dict[child_coin.name()] = child_coin
+ addition_amount = addition_amount + child_coin.amount
+ is_eligible = bool(spend.flags & ELIGIBLE_FOR_DEDUP)
+ if not is_eligible:
+ non_eligible_coin_ids.append(coin_id)
+ eligibility_and_additions[coin_id] = (is_eligible, spend_additions)
+ removal_names_from_coin_spends: Set[bytes32] = set()
+ bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = {}
+ for coin_spend in new_spend.coin_spends:
+ coin_id = coin_spend.coin.name()
+ removal_names_from_coin_spends.add(coin_id)
+ eligible_for_dedup, spend_additions = eligibility_and_additions.get(coin_id, (False, []))
+ bundle_coin_spends[coin_id] = BundleCoinSpend(coin_spend, eligible_for_dedup, spend_additions)
+
+ if removal_names != removal_names_from_coin_spends:
+ # If you reach here it's probably because your program reveal doesn't match the coin's puzzle hash
+ return Err.INVALID_SPEND_BUNDLE, None, []
removal_record_dict: Dict[bytes32, CoinRecord] = {}
removal_amount: int = 0
@@ -470,6 +451,9 @@ async def validate_spend_bundle(
if cost == 0:
return Err.UNKNOWN, None, []
+ if cost > self.max_block_clvm_cost:
+ return Err.BLOCK_COST_EXCEEDS_MAX, None, []
+
# this is not very likely to happen, but it's here to ensure SQLite
# never runs out of precision in its computation of fees.
# sqlite's integers are signed int64, so the max value they can
@@ -486,7 +470,7 @@ async def validate_spend_bundle(
return Err.INVALID_FEE_LOW_FEE, None, []
# Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
# Use this information later when constructing a block
- fail_reason, conflicts = self.check_removals(removal_record_dict)
+ fail_reason, conflicts = self.check_removals(non_eligible_coin_ids, removal_record_dict)
# If we have a mempool conflict, continue, since we still want to keep around the TX in the pending pool.
if fail_reason is not None and fail_reason is not Err.MEMPOOL_CONFLICT:
@@ -525,6 +509,7 @@ async def validate_spend_bundle(
timelocks.assert_height,
timelocks.assert_before_height,
timelocks.assert_before_seconds,
+ bundle_coin_spends,
)
if tl_error:
@@ -548,7 +533,9 @@ async def validate_spend_bundle(
return None, potential, [item.name for item in conflicts]
- def check_removals(self, removals: Dict[bytes32, CoinRecord]) -> Tuple[Optional[Err], Set[MempoolItem]]:
+ def check_removals(
+ self, non_eligible_coin_ids: List[bytes32], removals: Dict[bytes32, CoinRecord]
+ ) -> Tuple[Optional[Err], List[MempoolItem]]:
"""
This function checks for double spends, unknown spends and conflicting transactions in mempool.
Returns Error (if any), the set of existing MempoolItems with conflicting spends (if any).
@@ -556,25 +543,21 @@ def check_removals(self, removals: Dict[bytes32, CoinRecord]) -> Tuple[Optional[
having duplicate removals.
"""
assert self.peak is not None
- conflicts: Set[MempoolItem] = set()
-
+ # 1. Checks if it's been spent already
for record in removals.values():
- removal = record.coin
- # 1. Checks if it's been spent already
if record.spent:
- return Err.DOUBLE_SPEND, set()
- # 2. Checks if there's a mempool conflict
- items: List[MempoolItem] = self.mempool.get_spends_by_coin_id(removal.name())
- conflicts.update(items)
-
+ return Err.DOUBLE_SPEND, []
+ # 2. Checks if there's a mempool conflict
+ # Only consider conflicts if the coin is not eligible for deduplication
+ conflicts = self.mempool.get_items_by_coin_ids(non_eligible_coin_ids)
if len(conflicts) > 0:
return Err.MEMPOOL_CONFLICT, conflicts
# 5. If coins can be spent return list of unspents as we see them in local storage
- return None, set()
+ return None, []
def get_spendbundle(self, bundle_hash: bytes32) -> Optional[SpendBundle]:
"""Returns a full SpendBundle if it's inside one the mempools"""
- item: Optional[MempoolItem] = self.mempool.get_spend_by_id(bundle_hash)
+ item: Optional[MempoolItem] = self.mempool.get_item_by_id(bundle_hash)
if item is not None:
return item.spend_bundle
return None
@@ -586,7 +569,7 @@ def get_mempool_item(self, bundle_hash: bytes32, include_pending: bool = False)
If include_pending is specified, also check the PENDING cache.
"""
- item = self.mempool.get_spend_by_id(bundle_hash)
+ item = self.mempool.get_item_by_id(bundle_hash)
if not item and include_pending:
# no async lock needed since we're not mutating the pending_cache
item = self._pending_cache.get(bundle_hash)
@@ -626,7 +609,7 @@ async def new_peak(
# to deduplicate
spendbundle_ids_to_remove: Set[bytes32] = set()
for spend in last_npc_result.conds.spends:
- items: List[MempoolItem] = self.mempool.get_spends_by_coin_id(bytes32(spend.coin_id))
+ items: List[MempoolItem] = self.mempool.get_items_by_coin_id(bytes32(spend.coin_id))
for item in items:
included_items.append(MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool))
self.remove_seen(item.name)
@@ -636,7 +619,7 @@ async def new_peak(
old_pool = self.mempool
self.mempool = Mempool(old_pool.mempool_info, old_pool.fee_estimator)
self.seen_bundle_hashes = {}
- for item in old_pool.all_spends():
+ for item in old_pool.all_items():
_, result, err = await self.add_spend_bundle(
item.spend_bundle, item.npc_result, item.spend_bundle_name, item.height_added_to_mempool
)
@@ -673,7 +656,7 @@ def get_items_not_in_filter(self, mempool_filter: PyBIP158, limit: int = 100) ->
assert limit > 0
# Send 100 with the highest fee per cost
- for item in self.mempool.spends_by_feerate():
+ for item in self.mempool.items_by_feerate():
if len(items) >= limit:
return items
if mempool_filter.Match(bytearray(item.spend_bundle_name)):
@@ -695,7 +678,7 @@ def optional_max(a: Optional[T], b: Optional[T]) -> Optional[T]:
def can_replace(
- conflicting_items: Set[MempoolItem],
+ conflicting_items: List[MempoolItem],
removal_names: Set[bytes32],
new_item: MempoolItem,
) -> bool:
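
The reworked validation walks the parsed conditions once, recording each removal, the coins it creates, and whether the spend carries the ELIGIBLE_FOR_DEDUP flag; only the non-eligible coin ids are later handed to check_removals for conflict detection. A simplified, self-contained sketch of that partitioning step follows, using plain dicts in place of SpendBundleConditions and an illustrative flag value rather than the real chia_rs constant.

from typing import Dict, List, Tuple

ELIGIBLE_FOR_DEDUP_SKETCH = 1  # illustrative bit, not the real chia_rs constant value

def partition_spends(
    spends: List[Dict],  # each: {"coin_id": str, "flags": int, "created": List[str]}
) -> Tuple[Dict[str, Tuple[bool, List[str]]], List[str]]:
    eligibility_and_additions: Dict[str, Tuple[bool, List[str]]] = {}
    non_eligible_coin_ids: List[str] = []
    for spend in spends:
        is_eligible = bool(spend["flags"] & ELIGIBLE_FOR_DEDUP_SKETCH)
        if not is_eligible:
            # Only these coin ids participate in mempool conflict detection.
            non_eligible_coin_ids.append(spend["coin_id"])
        eligibility_and_additions[spend["coin_id"]] = (is_eligible, spend["created"])
    return eligibility_and_additions, non_eligible_coin_ids

if __name__ == "__main__":
    spends = [
        {"coin_id": "aaa", "flags": ELIGIBLE_FOR_DEDUP_SKETCH, "created": ["child-1"]},
        {"coin_id": "bbb", "flags": 0, "created": []},
    ]
    eligibility, conflict_candidates = partition_spends(spends)
    print(eligibility["aaa"])    # (True, ['child-1'])
    print(conflict_candidates)   # ['bbb']
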
diff --git a/chia/introducer/introducer_api.py b/chia/introducer/introducer_api.py
--- a/chia/introducer/introducer_api.py
+++ b/chia/introducer/introducer_api.py
@@ -40,7 +40,7 @@ async def request_peers_introducer(
if r_peer.vetted <= 0:
continue
- if r_peer.host == peer.peer_host and r_peer.port == peer.peer_server_port:
+ if r_peer.host == peer.peer_info.host and r_peer.port == peer.peer_server_port:
continue
peer_without_timestamp = TimestampedPeerInfo(
r_peer.host,
diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py
--- a/chia/plot_sync/receiver.py
+++ b/chia/plot_sync/receiver.py
@@ -349,8 +349,8 @@ def to_dict(self, counts_only: bool = False) -> Dict[str, Any]:
return {
"connection": {
"node_id": self._connection.peer_node_id,
- "host": self._connection.peer_host,
- "port": self._connection.peer_port,
+ "host": self._connection.peer_info.host,
+ "port": self._connection.peer_info.port,
},
"plots": get_list_or_len(list(self._plots.values()), counts_only),
"failed_to_open_filenames": get_list_or_len(self._invalid, counts_only),
diff --git a/chia/pools/pool_puzzles.py b/chia/pools/pool_puzzles.py
--- a/chia/pools/pool_puzzles.py
+++ b/chia/pools/pool_puzzles.py
@@ -21,10 +21,10 @@
log = logging.getLogger(__name__)
# "Full" is the outer singleton, with the inner puzzle filled in
-SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer.clvm")
-POOL_WAITING_ROOM_MOD = load_clvm_maybe_recompile("pool_waitingroom_innerpuz.clvm")
-POOL_MEMBER_MOD = load_clvm_maybe_recompile("pool_member_innerpuz.clvm")
-P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clvm")
+SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer.clsp")
+POOL_WAITING_ROOM_MOD = load_clvm_maybe_recompile("pool_waitingroom_innerpuz.clsp")
+POOL_MEMBER_MOD = load_clvm_maybe_recompile("pool_member_innerpuz.clsp")
+P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clsp")
POOL_OUTER_MOD = SINGLETON_MOD
POOL_MEMBER_HASH = POOL_MEMBER_MOD.get_tree_hash()
diff --git a/chia/pools/pool_wallet.py b/chia/pools/pool_wallet.py
--- a/chia/pools/pool_wallet.py
+++ b/chia/pools/pool_wallet.py
@@ -3,7 +3,7 @@
import dataclasses
import logging
import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, cast
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast
from blspy import G1Element, G2Element, PrivateKey
from typing_extensions import final
@@ -59,6 +59,11 @@
@final
@dataclasses.dataclass
class PoolWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol] = cast("PoolWallet", None)
+
MINIMUM_INITIAL_BALANCE = 1
MINIMUM_RELATIVE_LOCK_HEIGHT = 5
MAXIMUM_RELATIVE_LOCK_HEIGHT = 1000
@@ -988,9 +993,3 @@ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
def get_name(self) -> str:
return self.wallet_info.name
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = cast(PoolWallet, None)
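
The PoolWallet change above moves the WalletProtocol conformance check into the class body: a ClassVar annotated with the protocol is assigned cast("PoolWallet", None) under if TYPE_CHECKING, so mypy verifies the class satisfies the protocol while nothing extra runs at import time. A minimal sketch of the same pattern with a toy protocol (all names below are illustrative, not from the chia code base):

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, ClassVar, Protocol, cast

class GreeterProtocol(Protocol):
    def greet(self, name: str) -> str:
        ...

@dataclass
class EnglishGreeter:
    if TYPE_CHECKING:
        # mypy checks that EnglishGreeter satisfies GreeterProtocol; at runtime
        # this block is skipped entirely, so no attribute is actually created.
        _protocol_check: ClassVar[GreeterProtocol] = cast("EnglishGreeter", None)

    greeting: str = "Hello"

    def greet(self, name: str) -> str:
        return f"{self.greeting}, {name}!"

if __name__ == "__main__":
    print(EnglishGreeter().greet("world"))
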
diff --git a/chia/protocols/wallet_protocol.py b/chia/protocols/wallet_protocol.py
--- a/chia/protocols/wallet_protocol.py
+++ b/chia/protocols/wallet_protocol.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from typing import List, Optional, Tuple
-from chia_rs import CoinState, RespondToPhUpdates
+import chia_rs
from chia.full_node.fee_estimate import FeeEstimateGroup
from chia.types.blockchain_format.coin import Coin
@@ -20,7 +20,8 @@
"""
-__all__ = ["CoinState", "RespondToPhUpdates"]
+CoinState = chia_rs.CoinState
+RespondToPhUpdates = chia_rs.RespondToPhUpdates
@streamable
diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py
--- a/chia/rpc/data_layer_rpc_api.py
+++ b/chia/rpc/data_layer_rpc_api.py
@@ -99,6 +99,7 @@ def get_routes(self) -> Dict[str, Endpoint]:
"/verify_offer": self.verify_offer,
"/cancel_offer": self.cancel_offer,
"/get_sync_status": self.get_sync_status,
+ "/check_plugins": self.check_plugins,
}
async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> List[WsRpcMessage]:
@@ -428,3 +429,10 @@ async def get_sync_status(self, request: Dict[str, Any]) -> EndpointResult:
"target_generation": sync_status.target_generation,
}
}
+
+ async def check_plugins(self, request: Dict[str, Any]) -> EndpointResult:
+ if self.service is None:
+ raise Exception("Data layer not created")
+ plugin_status = await self.service.check_plugins()
+
+ return plugin_status.marshal()
diff --git a/chia/rpc/data_layer_rpc_client.py b/chia/rpc/data_layer_rpc_client.py
--- a/chia/rpc/data_layer_rpc_client.py
+++ b/chia/rpc/data_layer_rpc_client.py
@@ -116,3 +116,7 @@ async def get_owned_stores(self) -> Dict[str, Any]:
async def get_sync_status(self, store_id: bytes32) -> Dict[str, Any]:
response = await self.fetch("get_sync_status", {"id": store_id.hex()})
return response
+
+ async def check_plugins(self) -> Dict[str, Any]:
+ response = await self.fetch("check_plugins", {})
+ return response
diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py
--- a/chia/rpc/full_node_rpc_api.py
+++ b/chia/rpc/full_node_rpc_api.py
@@ -694,14 +694,8 @@ async def get_puzzle_and_solution(self, request: Dict[str, Any]) -> EndpointResu
block_generator: Optional[BlockGenerator] = await self.service.blockchain.get_block_generator(block)
assert block_generator is not None
- error, puzzle, solution = get_puzzle_and_solution_for_coin(block_generator, coin_record.coin)
- if error is not None:
- raise ValueError(f"Error: {error}")
-
- assert puzzle is not None
- assert solution is not None
-
- return {"coin_solution": CoinSpend(coin_record.coin, puzzle, solution)}
+ spend_info = get_puzzle_and_solution_for_coin(block_generator, coin_record.coin)
+ return {"coin_solution": CoinSpend(coin_record.coin, spend_info.puzzle, spend_info.solution)}
async def get_additions_and_removals(self, request: Dict[str, Any]) -> EndpointResult:
if "header_hash" not in request:
@@ -724,12 +718,12 @@ async def get_additions_and_removals(self, request: Dict[str, Any]) -> EndpointR
}
async def get_all_mempool_tx_ids(self, _: Dict[str, Any]) -> EndpointResult:
- ids = list(self.service.mempool_manager.mempool.all_spend_ids())
+ ids = list(self.service.mempool_manager.mempool.all_item_ids())
return {"tx_ids": ids}
async def get_all_mempool_items(self, _: Dict[str, Any]) -> EndpointResult:
spends = {}
- for item in self.service.mempool_manager.mempool.all_spends():
+ for item in self.service.mempool_manager.mempool.all_items():
spends[item.name.hex()] = item.to_json_dict()
return {"mempool_items": spends}
@@ -810,6 +804,7 @@ async def get_fee_estimate(self, request: Dict[str, Any]) -> Dict[str, Any]:
# at set times into the future. This can lead to situations that users do not expect,
# such as estimating a higher fee for a longer transaction time.
estimates = make_monotonically_decreasing(estimates)
+ estimates = [uint64(e) for e in estimates]
current_fee_rate = estimator.estimate_fee_rate(time_offset_seconds=1)
mempool_size = self.service.mempool_manager.mempool.total_mempool_cost()
mempool_fees = self.service.mempool_manager.mempool.total_mempool_fees()
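
The comment kept above the change explains why the estimates are forced to be monotonically decreasing across the time horizon before being cast to uint64: a caller should never be quoted a higher fee for a later target time. The helper name comes from the patch, but the sketch below is only an assumed, illustrative implementation of such a post-processing step (a running minimum from the shortest horizon outward), not the actual chia helper.

from typing import List

def make_monotonically_decreasing_sketch(estimates: List[float]) -> List[float]:
    # Scan from the shortest time horizon to the longest, clamping each value
    # to the running minimum so a later (more patient) target never costs more.
    smoothed: List[float] = []
    current_min = float("inf")
    for estimate in estimates:
        current_min = min(current_min, estimate)
        smoothed.append(current_min)
    return smoothed

if __name__ == "__main__":
    # Raw estimator output for, say, 1, 5 and 15 minute targets.
    print(make_monotonically_decreasing_sketch([6.0, 8.0, 3.0]))  # [6.0, 6.0, 3.0]
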
diff --git a/chia/rpc/full_node_rpc_client.py b/chia/rpc/full_node_rpc_client.py
--- a/chia/rpc/full_node_rpc_client.py
+++ b/chia/rpc/full_node_rpc_client.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Dict, List, Optional, Tuple, cast
from chia.consensus.block_record import BlockRecord
from chia.full_node.signage_point import SignagePoint
@@ -13,10 +13,10 @@
from chia.types.spend_bundle import SpendBundle
from chia.types.unfinished_header_block import UnfinishedHeaderBlock
from chia.util.byte_types import hexstr_to_bytes
-from chia.util.ints import uint32, uint64
+from chia.util.ints import uint32
-def coin_record_dict_backwards_compat(coin_record: Dict[str, Any]):
+def coin_record_dict_backwards_compat(coin_record: Dict[str, Any]) -> Dict[str, Any]:
del coin_record["spent"]
return coin_record
@@ -30,13 +30,13 @@ class FullNodeRpcClient(RpcClient):
to the full node.
"""
- async def get_blockchain_state(self) -> Dict:
+ async def get_blockchain_state(self) -> Dict[str, Any]:
response = await self.fetch("get_blockchain_state", {})
if response["blockchain_state"]["peak"] is not None:
response["blockchain_state"]["peak"] = BlockRecord.from_json_dict(response["blockchain_state"]["peak"])
- return response["blockchain_state"]
+ return cast(Dict[str, Any], response["blockchain_state"])
- async def get_block(self, header_hash) -> Optional[FullBlock]:
+ async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]:
try:
response = await self.fetch("get_block", {"header_hash": header_hash.hex()})
except Exception:
@@ -49,14 +49,14 @@ async def get_blocks(self, start: int, end: int, exclude_reorged: bool = False)
)
return [FullBlock.from_json_dict(block) for block in response["blocks"]]
- async def get_block_record_by_height(self, height) -> Optional[BlockRecord]:
+ async def get_block_record_by_height(self, height: int) -> Optional[BlockRecord]:
try:
response = await self.fetch("get_block_record_by_height", {"height": height})
except Exception:
return None
return BlockRecord.from_json_dict(response["block_record"])
- async def get_block_record(self, header_hash) -> Optional[BlockRecord]:
+ async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
try:
response = await self.fetch("get_block_record", {"header_hash": header_hash.hex()})
if response["block_record"] is None:
@@ -73,20 +73,16 @@ async def get_all_block(self, start: uint32, end: uint32) -> List[FullBlock]:
response = await self.fetch("get_blocks", {"start": start, "end": end, "exclude_header_hash": True})
return [FullBlock.from_json_dict(r) for r in response["blocks"]]
- async def get_network_space(
- self, newer_block_header_hash: bytes32, older_block_header_hash: bytes32
- ) -> Optional[uint64]:
- try:
- network_space_bytes_estimate = await self.fetch(
- "get_network_space",
- {
- "newer_block_header_hash": newer_block_header_hash.hex(),
- "older_block_header_hash": older_block_header_hash.hex(),
- },
- )
- except Exception:
- return None
- return network_space_bytes_estimate["space"]
+ async def get_network_space(self, newer_block_header_hash: bytes32, older_block_header_hash: bytes32) -> int:
+ network_space_bytes_estimate = await self.fetch(
+ "get_network_space",
+ {
+ "newer_block_header_hash": newer_block_header_hash.hex(),
+ "older_block_header_hash": older_block_header_hash.hex(),
+ },
+ )
+
+ return cast(int, network_space_bytes_estimate["space"])
async def get_coin_record_by_name(self, coin_id: bytes32) -> Optional[CoinRecord]:
try:
@@ -102,7 +98,7 @@ async def get_coin_records_by_names(
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
- ) -> List:
+ ) -> List[CoinRecord]:
names_hex = [name.hex() for name in names]
d = {"names": names_hex, "include_spent_coins": include_spent_coins}
if start_height is not None:
@@ -119,7 +115,7 @@ async def get_coin_records_by_puzzle_hash(
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
- ) -> List:
+ ) -> List[CoinRecord]:
d = {"puzzle_hash": puzzle_hash.hex(), "include_spent_coins": include_spent_coins}
if start_height is not None:
d["start_height"] = start_height
@@ -135,7 +131,7 @@ async def get_coin_records_by_puzzle_hashes(
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
- ) -> List:
+ ) -> List[CoinRecord]:
puzzle_hashes_hex = [ph.hex() for ph in puzzle_hashes]
d = {"puzzle_hashes": puzzle_hashes_hex, "include_spent_coins": include_spent_coins}
if start_height is not None:
@@ -152,7 +148,7 @@ async def get_coin_records_by_parent_ids(
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
- ) -> List:
+ ) -> List[CoinRecord]:
parent_ids_hex = [pid.hex() for pid in parent_ids]
d = {"parent_ids": parent_ids_hex, "include_spent_coins": include_spent_coins}
if start_height is not None:
@@ -169,7 +165,7 @@ async def get_coin_records_by_hint(
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
- ) -> List:
+ ) -> List[CoinRecord]:
d = {"hint": hint.hex(), "include_spent_coins": include_spent_coins}
if start_height is not None:
d["start_height"] = start_height
@@ -192,7 +188,7 @@ async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[C
additions.append(CoinRecord.from_json_dict(coin_record_dict_backwards_compat(coin_record)))
return additions, removals
- async def get_block_records(self, start: int, end: int) -> List:
+ async def get_block_records(self, start: int, end: int) -> List[Dict[str, Any]]:
try:
response = await self.fetch("get_block_records", {"start": start, "end": end})
if response["block_records"] is None:
@@ -200,7 +196,7 @@ async def get_block_records(self, start: int, end: int) -> List:
except Exception:
return []
# TODO: return block records
- return response["block_records"]
+ return cast(List[Dict[str, Any]], response["block_records"])
async def get_block_spends(self, header_hash: bytes32) -> Optional[List[CoinSpend]]:
try:
@@ -212,7 +208,7 @@ async def get_block_spends(self, header_hash: bytes32) -> Optional[List[CoinSpen
except Exception:
return None
- async def push_tx(self, spend_bundle: SpendBundle):
+ async def push_tx(self, spend_bundle: SpendBundle) -> Dict[str, Any]:
return await self.fetch("push_tx", {"spend_bundle": spend_bundle.to_json_dict()})
async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Optional[CoinSpend]:
@@ -226,19 +222,23 @@ async def get_all_mempool_tx_ids(self) -> List[bytes32]:
response = await self.fetch("get_all_mempool_tx_ids", {})
return [bytes32(hexstr_to_bytes(tx_id_hex)) for tx_id_hex in response["tx_ids"]]
- async def get_all_mempool_items(self) -> Dict[bytes32, Dict]:
- response: Dict = await self.fetch("get_all_mempool_items", {})
- converted: Dict[bytes32, Dict] = {}
+ async def get_all_mempool_items(self) -> Dict[bytes32, Dict[str, Any]]:
+ response = await self.fetch("get_all_mempool_items", {})
+ converted: Dict[bytes32, Dict[str, Any]] = {}
for tx_id_hex, item in response["mempool_items"].items():
converted[bytes32(hexstr_to_bytes(tx_id_hex))] = item
return converted
- async def get_mempool_item_by_tx_id(self, tx_id: bytes32, include_pending: bool = False) -> Optional[Dict]:
+ async def get_mempool_item_by_tx_id(
+ self,
+ tx_id: bytes32,
+ include_pending: bool = False,
+ ) -> Optional[Dict[str, Any]]:
try:
response = await self.fetch(
"get_mempool_item_by_tx_id", {"tx_id": tx_id.hex(), "include_pending": include_pending}
)
- return response["mempool_item"]
+ return cast(Dict[str, Any], response["mempool_item"])
except Exception:
return None
diff --git a/chia/rpc/rpc_server.py b/chia/rpc/rpc_server.py
--- a/chia/rpc/rpc_server.py
+++ b/chia/rpc/rpc_server.py
@@ -21,7 +21,7 @@
from chia.util.config import str2bool
from chia.util.ints import uint16
from chia.util.json_util import dict_to_json_str
-from chia.util.network import WebServer, get_host_addr
+from chia.util.network import WebServer, resolve
from chia.util.ws_message import WsRpcMessage, create_payload, create_payload_dict, format_response, pong
log = logging.getLogger(__name__)
@@ -120,8 +120,8 @@ def default_get_connections(server: ChiaServer, request_node_type: Optional[Node
{
"type": con.connection_type,
"local_port": con.local_port,
- "peer_host": con.peer_host,
- "peer_port": con.peer_port,
+ "peer_host": con.peer_info.host,
+ "peer_port": con.peer_info.port,
"peer_server_port": con.peer_server_port,
"node_id": con.peer_node_id,
"creation_time": con.creation_time,
@@ -276,7 +276,7 @@ async def get_connections(self, request: Dict[str, Any]) -> EndpointResult:
async def open_connection(self, request: Dict[str, Any]) -> EndpointResult:
host = request["host"]
port = request["port"]
- target_node: PeerInfo = PeerInfo(str(get_host_addr(host, prefer_ipv6=self.prefer_ipv6)), uint16(int(port)))
+ target_node: PeerInfo = PeerInfo(await resolve(host, prefer_ipv6=self.prefer_ipv6), uint16(int(port)))
on_connect = None
if hasattr(self.rpc_api.service, "on_connect"):
on_connect = self.rpc_api.service.on_connect
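
open_connection now awaits resolve instead of calling the blocking get_host_addr, so name resolution no longer stalls the event loop. The sketch below is only a guess at what an async resolver of this shape could look like, built on asyncio's getaddrinfo and assuming the prefer_ipv6 flag picks the address family; it is not the actual chia.util.network.resolve implementation.

import asyncio
import socket

async def resolve_sketch(host: str, *, prefer_ipv6: bool = False) -> str:
    loop = asyncio.get_running_loop()
    # getaddrinfo runs in the default executor, so the event loop stays responsive.
    infos = await loop.getaddrinfo(host, None, type=socket.SOCK_STREAM)
    preferred_family = socket.AF_INET6 if prefer_ipv6 else socket.AF_INET
    for family, _type, _proto, _canonname, sockaddr in infos:
        if family == preferred_family:
            return sockaddr[0]
    # Fall back to the first result if the preferred family is unavailable.
    return infos[0][4][0]

if __name__ == "__main__":
    print(asyncio.run(resolve_sketch("localhost")))
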
diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py
--- a/chia/rpc/wallet_rpc_api.py
+++ b/chia/rpc/wallet_rpc_api.py
@@ -28,11 +28,13 @@
from chia.types.spend_bundle import SpendBundle
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
from chia.util.byte_types import hexstr_to_bytes
-from chia.util.config import load_config
+from chia.util.config import load_config, str2bool
from chia.util.errors import KeychainIsLocked
from chia.util.ints import uint16, uint32, uint64
from chia.util.keychain import bytes_to_mnemonic, generate_mnemonic
+from chia.util.misc import UInt32Range
from chia.util.path import path_from_root
+from chia.util.streamable import Streamable, streamable
from chia.util.ws_message import WsRpcMessage, create_payload_dict
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_wallet import CATWallet
@@ -59,9 +61,10 @@
from chia.wallet.nft_wallet.uncurry_nft import UncurriedNFT
from chia.wallet.notification_store import Notification
from chia.wallet.outer_puzzles import AssetType
+from chia.wallet.payment import Payment
from chia.wallet.puzzle_drivers import PuzzleInfo, Solver
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import puzzle_hash_for_synthetic_public_key
-from chia.wallet.singleton import create_fullpuz
+from chia.wallet.singleton import create_singleton_puzzle
from chia.wallet.trade_record import TradeRecord
from chia.wallet.trading.offer import Offer
from chia.wallet.transaction_record import TransactionRecord
@@ -69,8 +72,12 @@
from chia.wallet.util.address_type import AddressType, is_valid_address
from chia.wallet.util.compute_hints import compute_coin_hints
from chia.wallet.util.compute_memos import compute_memos
+from chia.wallet.util.query_filter import HashFilter, TransactionTypeFilter
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend_for_coin_state
+from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.vc_wallet.vc_store import VCProofs
+from chia.wallet.vc_wallet.vc_wallet import VCWallet
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX, Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
@@ -90,7 +97,6 @@ def __init__(self, wallet_node: WalletNode):
assert wallet_node is not None
self.service = wallet_node
self.service_name = "chia_wallet"
- self.balance_cache: Dict[int, Any] = {}
def get_routes(self) -> Dict[str, Endpoint]:
return {
@@ -211,6 +217,14 @@ def get_routes(self) -> Dict[str, Endpoint]:
"/dl_get_mirrors": self.dl_get_mirrors,
"/dl_new_mirror": self.dl_new_mirror,
"/dl_delete_mirror": self.dl_delete_mirror,
+ # Verified Credential
+ "/vc_mint": self.vc_mint,
+ "/vc_get": self.vc_get,
+ "/vc_get_list": self.vc_get_list,
+ "/vc_spend": self.vc_spend,
+ "/vc_add_proofs": self.vc_add_proofs,
+ "/vc_get_proofs_for_root": self.vc_get_proofs_for_root,
+ "/vc_revoke": self.vc_revoke,
}
def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
@@ -267,10 +281,8 @@ async def _convert_tx_puzzle_hash(self, tx: TransactionRecord) -> TransactionRec
)
async def get_latest_singleton_coin_spend(
- self, peer: Optional[WSChiaConnection], coin_id: bytes32, latest: bool = True
+ self, peer: WSChiaConnection, coin_id: bytes32, latest: bool = True
) -> Tuple[CoinSpend, CoinState]:
- if peer is None:
- raise ValueError("No peers to get info from")
coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state(
[coin_id], peer=peer
)
@@ -299,9 +311,7 @@ async def get_latest_singleton_coin_spend(
if parent_coin_state_list is None or len(parent_coin_state_list) < 1:
raise ValueError(f"Parent coin record 0x{coin_state.coin.parent_coin_info.hex()} not found")
parent_coin_state: CoinState = parent_coin_state_list[0]
- coin_spend: CoinSpend = await self.service.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- parent_coin_state.spent_height, parent_coin_state.coin, peer
- )
+ coin_spend = await fetch_coin_spend_for_coin_state(parent_coin_state, peer)
return coin_spend, coin_state
##########################################################################################
@@ -318,7 +328,6 @@ async def log_in(self, request) -> EndpointResult:
return {"fingerprint": fingerprint}
await self._stop_wallet()
- self.balance_cache = {}
started = await self.service._start_with_fingerprint(fingerprint)
if started is True:
return {"fingerprint": fingerprint}
@@ -540,9 +549,6 @@ async def push_tx(self, request: Dict) -> EndpointResult:
return {}
async def push_transactions(self, request: Dict) -> EndpointResult:
- if await self.service.wallet_state_manager.synced() is False:
- raise ValueError("Wallet needs to be fully synced before sending transactions")
-
wallet = self.service.wallet_state_manager.main_wallet
txs: List[TransactionRecord] = []
@@ -792,61 +798,15 @@ async def create_new_wallet(self, request: Dict) -> EndpointResult:
async def get_wallet_balance(self, request: Dict) -> EndpointResult:
wallet_id = uint32(int(request["wallet_id"]))
wallet = self.service.wallet_state_manager.wallets[wallet_id]
-
- # If syncing return the last available info or 0s
- syncing = self.service.wallet_state_manager.sync_mode
- if syncing:
- if wallet_id in self.balance_cache:
- wallet_balance = self.balance_cache[wallet_id]
- else:
- wallet_balance = {
- "wallet_id": wallet_id,
- "confirmed_wallet_balance": 0,
- "unconfirmed_wallet_balance": 0,
- "spendable_balance": 0,
- "pending_change": 0,
- "max_send_amount": 0,
- "unspent_coin_count": 0,
- "pending_coin_removal_count": 0,
- "wallet_type": wallet.type(),
- }
- if self.service.logged_in_fingerprint is not None:
- wallet_balance["fingerprint"] = self.service.logged_in_fingerprint
- if wallet.type() == WalletType.CAT:
- assert isinstance(wallet, CATWallet)
- wallet_balance["asset_id"] = wallet.get_asset_id()
- else:
- async with self.service.wallet_state_manager.lock:
- unspent_records = await self.service.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(
- wallet_id
- )
- balance = await wallet.get_confirmed_balance(unspent_records)
- pending_balance = await wallet.get_unconfirmed_balance(unspent_records)
- spendable_balance = await wallet.get_spendable_balance(unspent_records)
- pending_change = await wallet.get_pending_change_balance()
- max_send_amount = await wallet.get_max_send_amount(unspent_records)
-
- unconfirmed_removals: Dict[
- bytes32, Coin
- ] = await wallet.wallet_state_manager.unconfirmed_removals_for_wallet(wallet_id)
- wallet_balance = {
- "wallet_id": wallet_id,
- "confirmed_wallet_balance": balance,
- "unconfirmed_wallet_balance": pending_balance,
- "spendable_balance": spendable_balance,
- "pending_change": pending_change,
- "max_send_amount": max_send_amount,
- "unspent_coin_count": len(unspent_records),
- "pending_coin_removal_count": len(unconfirmed_removals),
- "wallet_type": wallet.type(),
- }
- if self.service.logged_in_fingerprint is not None:
- wallet_balance["fingerprint"] = self.service.logged_in_fingerprint
- if wallet.type() == WalletType.CAT:
- assert isinstance(wallet, CATWallet)
- wallet_balance["asset_id"] = wallet.get_asset_id()
- self.balance_cache[wallet_id] = wallet_balance
-
+ balance = await self.service.get_balance(wallet_id)
+ wallet_balance = balance.to_json_dict()
+ wallet_balance["wallet_id"] = wallet_id
+ wallet_balance["wallet_type"] = wallet.type()
+ if self.service.logged_in_fingerprint is not None:
+ wallet_balance["fingerprint"] = self.service.logged_in_fingerprint
+ if wallet.type() == WalletType.CAT:
+ assert isinstance(wallet, CATWallet)
+ wallet_balance["asset_id"] = wallet.get_asset_id()
return {"wallet_balance": wallet_balance}
async def get_transaction(self, request: Dict) -> EndpointResult:
@@ -868,16 +828,13 @@ async def get_transaction_memo(self, request: Dict) -> EndpointResult:
if tr.spend_bundle is None or len(tr.spend_bundle.coin_spends) == 0:
if tr.type == uint32(TransactionType.INCOMING_TX.value):
# Fetch incoming tx coin spend
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- assert peer is not None
+ peer = self.service.get_full_node_peer()
assert len(tr.additions) == 1
coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state(
[tr.additions[0].parent_coin_info], peer=peer
)
assert len(coin_state_list) == 1
- coin_spend: CoinSpend = await self.service.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- coin_state_list[0].spent_height, coin_state_list[0].coin, peer
- )
+ coin_spend = await fetch_coin_spend_for_coin_state(coin_state_list[0], peer)
tr = dataclasses.replace(tr, spend_bundle=SpendBundle([coin_spend], G2Element()))
else:
raise ValueError(f"Transaction 0x{transaction_id.hex()} doesn't have any coin spend.")
@@ -901,9 +858,18 @@ async def get_transactions(self, request: Dict) -> EndpointResult:
to_puzzle_hash: Optional[bytes32] = None
if to_address is not None:
to_puzzle_hash = decode_puzzle_hash(to_address)
+ type_filter = None
+ if "type_filter" in request:
+ type_filter = TransactionTypeFilter.from_json_dict(request["type_filter"])
transactions = await self.service.wallet_state_manager.tx_store.get_transactions_between(
- wallet_id, start, end, sort_key=sort_key, reverse=reverse, to_puzzle_hash=to_puzzle_hash
+ wallet_id,
+ start,
+ end,
+ sort_key=sort_key,
+ reverse=reverse,
+ to_puzzle_hash=to_puzzle_hash,
+ type_filter=type_filter,
)
return {
"transactions": [
@@ -986,10 +952,10 @@ async def send_transaction(self, request) -> EndpointResult:
exclude_coin_amounts = [uint64(a) for a in exclude_coin_amounts]
exclude_coin_ids: Optional[List] = request.get("exclude_coin_ids")
if exclude_coin_ids is not None:
- coin_records = await self.service.wallet_state_manager.coin_store.get_coin_records(
- [bytes32.from_hexstr(hex_id) for hex_id in exclude_coin_ids]
+ result = await self.service.wallet_state_manager.coin_store.get_coin_records(
+ coin_id_filter=HashFilter.include([bytes32.from_hexstr(hex_id) for hex_id in exclude_coin_ids])
)
- exclude_coins = {wr.coin for wr in coin_records.values()}
+ exclude_coins = {wr.coin for wr in result.records}
else:
exclude_coins = set()
@@ -1159,24 +1125,28 @@ async def get_coin_records_by_names(self, request) -> EndpointResult:
if "names" not in request:
raise ValueError("Names not in request")
+ coin_ids = [bytes32.from_hexstr(name) for name in request["names"]]
kwargs: Dict[str, Any] = {
- "include_spent_coins": False,
- "coin_names": [hexstr_to_bytes(name) for name in request["names"]],
+ "coin_id_filter": HashFilter.include(coin_ids),
}
+
+ confirmed_range = UInt32Range()
if "start_height" in request:
- kwargs["start_height"] = uint32(request["start_height"])
+ confirmed_range = dataclasses.replace(confirmed_range, start=uint32(request["start_height"]))
if "end_height" in request:
- kwargs["end_height"] = uint32(request["end_height"])
+ confirmed_range = dataclasses.replace(confirmed_range, stop=uint32(request["end_height"]))
+ if confirmed_range != UInt32Range():
+ kwargs["confirmed_range"] = confirmed_range
- if "include_spent_coins" in request:
- kwargs["include_spent_coins"] = request["include_spent_coins"]
+ if "include_spent_coins" in request and not str2bool(request["include_spent_coins"]):
+ kwargs["spent_range"] = UInt32Range(start=uint32(uint32.MAXIMUM_EXCLUSIVE - 1))
async with self.service.wallet_state_manager.lock:
coin_records: List[CoinRecord] = await self.service.wallet_state_manager.get_coin_records_by_coin_ids(
**kwargs
)
missed_coins: List[str] = [
- "0x" + c_id.hex() for c_id in kwargs["coin_names"] if c_id not in [cr.name for cr in coin_records]
+ "0x" + c_id.hex() for c_id in coin_ids if c_id not in [cr.name for cr in coin_records]
]
if missed_coins:
raise ValueError(f"Coin ID's: {missed_coins} not found.")
@@ -1478,10 +1448,10 @@ async def cat_spend(self, request, hold_lock=True) -> EndpointResult:
exclude_coin_amounts = [uint64(a) for a in exclude_coin_amounts]
exclude_coin_ids: Optional[List] = request.get("exclude_coin_ids")
if exclude_coin_ids is not None:
- coin_records = await self.service.wallet_state_manager.coin_store.get_coin_records(
- [bytes32.from_hexstr(hex_id) for hex_id in exclude_coin_ids]
+ result = await self.service.wallet_state_manager.coin_store.get_coin_records(
+ coin_id_filter=HashFilter.include([bytes32.from_hexstr(hex_id) for hex_id in exclude_coin_ids])
)
- exclude_coins = {wr.coin for wr in coin_records.values()}
+ exclude_coins = {wr.coin for wr in result.records}
else:
exclude_coins = None
cat_discrepancy_params: Tuple[Optional[int], Optional[str], Optional[str]] = (
@@ -1658,9 +1628,7 @@ async def get_offer_summary(self, request) -> EndpointResult:
async def check_offer_validity(self, request) -> EndpointResult:
offer_hex: str = request["offer"]
offer = Offer.from_bech32(offer_hex)
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- if peer is None:
- raise ValueError("No peer connected")
+ peer = self.service.get_full_node_peer()
return {
"valid": (await self.service.wallet_state_manager.trade_manager.check_offer_validity(offer, peer)),
"id": offer.name(),
@@ -1682,9 +1650,7 @@ async def take_offer(self, request) -> EndpointResult:
solver = Solver(info=maybe_marshalled_solver)
async with self.service.wallet_state_manager.lock:
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- if peer is None:
- raise ValueError("No peer connected")
+ peer = self.service.get_full_node_peer()
trade_record, tx_records = await self.service.wallet_state_manager.trade_manager.respond_to_offer(
offer,
peer,
@@ -1866,7 +1832,7 @@ async def did_get_info(self, request) -> EndpointResult:
else:
coin_id = bytes32.from_hexstr(coin_id)
# Get coin state
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
+ peer = self.service.get_full_node_peer()
coin_spend, coin_state = await self.get_latest_singleton_coin_spend(peer, coin_id, request.get("latest", True))
full_puzzle: Program = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
uncurried = uncurry_puzzle(full_puzzle)
@@ -1883,13 +1849,17 @@ async def did_get_info(self, request) -> EndpointResult:
hints.append(memo.hex())
return {
"success": True,
+ "did_id": encode_puzzle_hash(
+ bytes32.from_hexstr(singleton_struct.rest().first().atom.hex()),
+ AddressType.DID.hrp(self.service.config),
+ ),
"latest_coin": coin_state.coin.name().hex(),
"p2_address": encode_puzzle_hash(p2_puzzle.get_tree_hash(), AddressType.XCH.hrp(self.service.config)),
- "public_key": public_key.as_python().hex(),
- "recovery_list_hash": recovery_list_hash.as_python().hex(),
+ "public_key": public_key.atom.hex(),
+ "recovery_list_hash": recovery_list_hash.atom.hex(),
"num_verification": num_verification.as_int(),
"metadata": program_to_metadata(metadata),
- "launcher_id": singleton_struct.rest().first().as_python().hex(),
+ "launcher_id": singleton_struct.rest().first().atom.hex(),
"full_puzzle": full_puzzle,
"solution": Program.from_bytes(bytes(coin_spend.solution)).as_python(),
"hints": hints,
@@ -1911,8 +1881,7 @@ async def did_find_lost_did(self, request) -> EndpointResult:
else:
coin_id = bytes32.from_hexstr(coin_id)
# Get coin state
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- assert peer is not None
+ peer = self.service.get_full_node_peer()
coin_spend, coin_state = await self.get_latest_singleton_coin_spend(peer, coin_id)
full_puzzle: Program = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
uncurried = uncurry_puzzle(full_puzzle)
@@ -1952,7 +1921,7 @@ async def did_find_lost_did(self, request) -> EndpointResult:
did_puzzle = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, recovery_list_hash, num_verification, singleton_struct, metadata
)
- full_puzzle = create_fullpuz(did_puzzle, launcher_id)
+ full_puzzle = create_singleton_puzzle(did_puzzle, launcher_id)
did_puzzle_empty_recovery = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, Program.to([]).get_tree_hash(), uint64(0), singleton_struct, metadata
)
@@ -1965,14 +1934,14 @@ async def did_find_lost_did(self, request) -> EndpointResult:
did_wallet = wallet
break
- full_puzzle_empty_recovery = create_fullpuz(did_puzzle_empty_recovery, launcher_id)
+ full_puzzle_empty_recovery = create_singleton_puzzle(did_puzzle_empty_recovery, launcher_id)
if full_puzzle.get_tree_hash() != coin_state.coin.puzzle_hash:
if full_puzzle_empty_recovery.get_tree_hash() == coin_state.coin.puzzle_hash:
did_puzzle = did_puzzle_empty_recovery
elif (
did_wallet is not None
and did_wallet.did_info.current_inner is not None
- and create_fullpuz(did_wallet.did_info.current_inner, launcher_id).get_tree_hash()
+ and create_singleton_puzzle(did_wallet.did_info.current_inner, launcher_id).get_tree_hash()
== coin_state.coin.puzzle_hash
):
# Check if the old wallet has the inner puzzle
@@ -1987,7 +1956,7 @@ async def did_find_lost_did(self, request) -> EndpointResult:
did_puzzle = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, recovery_list_hash, num_verification, singleton_struct, metadata
)
- full_puzzle = create_fullpuz(did_puzzle, launcher_id)
+ full_puzzle = create_singleton_puzzle(did_puzzle, launcher_id)
matched = True
if full_puzzle.get_tree_hash() != coin_state.coin.puzzle_hash:
matched = False
@@ -2003,7 +1972,7 @@ async def did_find_lost_did(self, request) -> EndpointResult:
did_puzzle = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, recovery_list_hash, num_verification, singleton_struct, metadata
)
- full_puzzle = create_fullpuz(did_puzzle, launcher_id)
+ full_puzzle = create_singleton_puzzle(did_puzzle, launcher_id)
if full_puzzle.get_tree_hash() == coin_state.coin.puzzle_hash:
matched = True
break
@@ -2671,8 +2640,7 @@ async def nft_get_info(self, request: Dict) -> EndpointResult:
except ValueError:
return {"success": False, "error": f"Invalid Coin ID format for 'coin_id': {request['coin_id']!r}"}
# Get coin state
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- assert peer is not None
+ peer = self.service.get_full_node_peer()
coin_spend, coin_state = await self.get_latest_singleton_coin_spend(peer, coin_id, request.get("latest", True))
# convert to NFTInfo
# Check if the metadata is updated
@@ -2930,7 +2898,7 @@ async def create_signed_transaction(self, request, hold_lock=True) -> EndpointRe
memos_0 = [] if "memos" not in additions[0] else [mem.encode("utf-8") for mem in additions[0]["memos"]]
- additional_outputs: List[AmountWithPuzzlehash] = []
+ additional_outputs: List[Payment] = []
for addition in additions[1:]:
receiver_ph = bytes32.from_hexstr(addition["puzzle_hash"])
if len(receiver_ph) != 32:
@@ -2939,7 +2907,7 @@ async def create_signed_transaction(self, request, hold_lock=True) -> EndpointRe
if amount > self.service.constants.MAX_COIN_AMOUNT:
raise ValueError(f"Coin amount cannot exceed {self.service.constants.MAX_COIN_AMOUNT}")
memos = [] if "memos" not in addition else [mem.encode("utf-8") for mem in addition["memos"]]
- additional_outputs.append({"puzzlehash": receiver_ph, "amount": amount, "memos": memos})
+ additional_outputs.append(Payment(receiver_ph, amount, memos))
fee: uint64 = uint64(request.get("fee", 0))
min_coin_amount: uint64 = uint64(request.get("min_coin_amount", 0))
@@ -3017,13 +2985,13 @@ async def _generate_signed_transaction() -> EndpointResult:
assert isinstance(wallet, CATWallet)
txs = await wallet.generate_signed_transaction(
- [amount_0] + [output["amount"] for output in additional_outputs],
- [bytes32(puzzle_hash_0)] + [output["puzzlehash"] for output in additional_outputs],
+ [amount_0] + [output.amount for output in additional_outputs],
+ [bytes32(puzzle_hash_0)] + [output.puzzle_hash for output in additional_outputs],
fee,
coins=coins,
exclude_cat_coins=exclude_coins,
ignore_max_send_amount=True,
- memos=[memos_0] + [output["memos"] for output in additional_outputs],
+ memos=[memos_0] + [output.memos for output in additional_outputs],
coin_announcements_to_consume=coin_announcements,
puzzle_announcements_to_consume=puzzle_announcements,
min_coin_amount=min_coin_amount,
@@ -3120,18 +3088,11 @@ async def create_new_dl(self, request) -> Dict:
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- dl_wallet: DataLayerWallet
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- dl_wallet = wallet
- break
- else:
+ try:
+ dl_wallet = self.service.wallet_state_manager.get_dl_wallet()
+ except ValueError:
async with self.service.wallet_state_manager.lock:
- dl_wallet = await DataLayerWallet.create_new_dl_wallet(
- self.service.wallet_state_manager,
- self.service.wallet_state_manager.main_wallet,
- )
+ dl_wallet = await DataLayerWallet.create_new_dl_wallet(self.service.wallet_state_manager)
try:
async with self.service.wallet_state_manager.lock:
@@ -3154,24 +3115,17 @@ async def dl_track_new(self, request) -> Dict:
"""Initialize the DataLayer Wallet (only one can exist)"""
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
-
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- if peer is None:
- raise ValueError("No peer connected")
-
- dl_wallet: DataLayerWallet
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- dl_wallet = wallet
- break
- else:
+ try:
+ dl_wallet = self.service.wallet_state_manager.get_dl_wallet()
+ except ValueError:
async with self.service.wallet_state_manager.lock:
dl_wallet = await DataLayerWallet.create_new_dl_wallet(
self.service.wallet_state_manager,
- self.service.wallet_state_manager.main_wallet,
)
- await dl_wallet.track_new_launcher_id(bytes32.from_hexstr(request["launcher_id"]), peer)
+ await dl_wallet.track_new_launcher_id(
+ bytes32.from_hexstr(request["launcher_id"]),
+ self.service.get_full_node_peer(),
+ )
return {}
async def dl_stop_tracking(self, request) -> Dict:
@@ -3180,9 +3134,6 @@ async def dl_stop_tracking(self, request) -> Dict:
raise ValueError("The wallet service is not currently initialized")
dl_wallet = self.service.wallet_state_manager.get_dl_wallet()
- if dl_wallet is None:
- raise ValueError("The DataLayer wallet has not been initialized")
-
await dl_wallet.stop_tracking_singleton(bytes32.from_hexstr(request["launcher_id"]))
return {}
@@ -3191,119 +3142,93 @@ async def dl_latest_singleton(self, request) -> Dict:
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- only_confirmed = request.get("only_confirmed")
- if only_confirmed is None:
- only_confirmed = False
- record = await wallet.get_latest_singleton(bytes32.from_hexstr(request["launcher_id"]), only_confirmed)
- return {"singleton": None if record is None else record.to_json_dict()}
-
- raise ValueError("No DataLayer wallet has been initialized")
+ only_confirmed = request.get("only_confirmed")
+ if only_confirmed is None:
+ only_confirmed = False
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
+ record = await wallet.get_latest_singleton(bytes32.from_hexstr(request["launcher_id"]), only_confirmed)
+ return {"singleton": None if record is None else record.to_json_dict()}
async def dl_singletons_by_root(self, request) -> Dict:
"""Get the singleton records that contain the specified root"""
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for wallet in self.service.wallet_state_manager.wallets.values():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- records = await wallet.get_singletons_by_root(
- bytes32.from_hexstr(request["launcher_id"]), bytes32.from_hexstr(request["root"])
- )
- records_json = [rec.to_json_dict() for rec in records]
- return {"singletons": records_json}
-
- raise ValueError("No DataLayer wallet has been initialized")
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
+ records = await wallet.get_singletons_by_root(
+ bytes32.from_hexstr(request["launcher_id"]), bytes32.from_hexstr(request["root"])
+ )
+ records_json = [rec.to_json_dict() for rec in records]
+ return {"singletons": records_json}
async def dl_update_root(self, request) -> Dict:
"""Get the singleton record for the latest singleton of a launcher ID"""
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- async with self.service.wallet_state_manager.lock:
- records = await wallet.create_update_state_spend(
- bytes32.from_hexstr(request["launcher_id"]),
- bytes32.from_hexstr(request["new_root"]),
- fee=uint64(request.get("fee", 0)),
- )
- for record in records:
- await self.service.wallet_state_manager.add_pending_transaction(record)
- return {"tx_record": records[0].to_json_dict_convenience(self.service.config)}
-
- raise ValueError("No DataLayer wallet has been initialized")
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
+ async with self.service.wallet_state_manager.lock:
+ records = await wallet.create_update_state_spend(
+ bytes32.from_hexstr(request["launcher_id"]),
+ bytes32.from_hexstr(request["new_root"]),
+ fee=uint64(request.get("fee", 0)),
+ )
+ for record in records:
+ await self.service.wallet_state_manager.add_pending_transaction(record)
+ return {"tx_record": records[0].to_json_dict_convenience(self.service.config)}
async def dl_update_multiple(self, request) -> Dict:
"""Update multiple singletons with new merkle roots"""
if self.service.wallet_state_manager is None:
return {"success": False, "error": "not_initialized"}
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- async with self.service.wallet_state_manager.lock:
- # TODO: This method should optionally link the singletons with announcements.
- # Otherwise spends are vulnerable to signature subtraction.
- tx_records: List[TransactionRecord] = []
- for launcher, root in request["updates"].items():
- records = await wallet.create_update_state_spend(
- bytes32.from_hexstr(launcher), bytes32.from_hexstr(root)
- )
- tx_records.extend(records)
- # Now that we have all the txs, we need to aggregate them all into just one spend
- modified_txs: List[TransactionRecord] = []
- aggregate_spend = SpendBundle([], G2Element())
- for tx in tx_records:
- if tx.spend_bundle is not None:
- aggregate_spend = SpendBundle.aggregate([aggregate_spend, tx.spend_bundle])
- modified_txs.append(dataclasses.replace(tx, spend_bundle=None))
- modified_txs[0] = dataclasses.replace(modified_txs[0], spend_bundle=aggregate_spend)
- for tx in modified_txs:
- await self.service.wallet_state_manager.add_pending_transaction(tx)
- return {"tx_records": [rec.to_json_dict_convenience(self.service.config) for rec in modified_txs]}
-
- raise ValueError("No DataLayer wallet has been initialized")
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
+ async with self.service.wallet_state_manager.lock:
+ # TODO: This method should optionally link the singletons with announcements.
+ # Otherwise spends are vulnerable to signature subtraction.
+ tx_records: List[TransactionRecord] = []
+ for launcher, root in request["updates"].items():
+ records = await wallet.create_update_state_spend(
+ bytes32.from_hexstr(launcher), bytes32.from_hexstr(root)
+ )
+ tx_records.extend(records)
+ # Now that we have all the txs, we need to aggregate them all into just one spend
+ modified_txs: List[TransactionRecord] = []
+ aggregate_spend = SpendBundle([], G2Element())
+ for tx in tx_records:
+ if tx.spend_bundle is not None:
+ aggregate_spend = SpendBundle.aggregate([aggregate_spend, tx.spend_bundle])
+ modified_txs.append(dataclasses.replace(tx, spend_bundle=None))
+ modified_txs[0] = dataclasses.replace(modified_txs[0], spend_bundle=aggregate_spend)
+ for tx in modified_txs:
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+ return {"tx_records": [rec.to_json_dict_convenience(self.service.config) for rec in modified_txs]}
async def dl_history(self, request) -> Dict:
"""Get the singleton record for the latest singleton of a launcher ID"""
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- additional_kwargs = {}
-
- if "min_generation" in request:
- additional_kwargs["min_generation"] = uint32(request["min_generation"])
- if "max_generation" in request:
- additional_kwargs["max_generation"] = uint32(request["max_generation"])
- if "num_results" in request:
- additional_kwargs["num_results"] = uint32(request["num_results"])
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
+ additional_kwargs = {}
- history = await wallet.get_history(bytes32.from_hexstr(request["launcher_id"]), **additional_kwargs)
- history_json = [rec.to_json_dict() for rec in history]
- return {"history": history_json, "count": len(history_json)}
+ if "min_generation" in request:
+ additional_kwargs["min_generation"] = uint32(request["min_generation"])
+ if "max_generation" in request:
+ additional_kwargs["max_generation"] = uint32(request["max_generation"])
+ if "num_results" in request:
+ additional_kwargs["num_results"] = uint32(request["num_results"])
- raise ValueError("No DataLayer wallet has been initialized")
+ history = await wallet.get_history(bytes32.from_hexstr(request["launcher_id"]), **additional_kwargs)
+ history_json = [rec.to_json_dict() for rec in history]
+ return {"history": history_json, "count": len(history_json)}
async def dl_owned_singletons(self, request) -> Dict:
"""Get all owned singleton records"""
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- break
- else:
- raise ValueError("No DataLayer wallet has been initialized")
-
- assert isinstance(wallet, DataLayerWallet)
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
singletons = await wallet.get_owned_singletons()
singletons_json = [singleton.to_json_dict() for singleton in singletons]
@@ -3314,13 +3239,7 @@ async def dl_get_mirrors(self, request) -> Dict:
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- break
- else:
- raise ValueError("No DataLayer wallet has been initialized")
-
- assert isinstance(wallet, DataLayerWallet)
+ wallet = self.service.wallet_state_manager.get_dl_wallet()
mirrors_json = []
for mirror in await wallet.get_mirrors_for_launcher(bytes32.from_hexstr(request["launcher_id"])):
mirrors_json.append(mirror.to_json_dict())
@@ -3332,14 +3251,7 @@ async def dl_new_mirror(self, request) -> Dict:
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- dl_wallet = wallet
- break
- else:
- raise ValueError("No DataLayer wallet has been initialized")
-
- assert isinstance(dl_wallet, DataLayerWallet)
+ dl_wallet = self.service.wallet_state_manager.get_dl_wallet()
async with self.service.wallet_state_manager.lock:
txs = await dl_wallet.create_new_mirror(
bytes32.from_hexstr(request["launcher_id"]),
@@ -3359,22 +3271,12 @@ async def dl_delete_mirror(self, request) -> Dict:
if self.service.wallet_state_manager is None:
raise ValueError("The wallet service is not currently initialized")
- peer: Optional[WSChiaConnection] = self.service.get_full_node_peer()
- if peer is None:
- raise ValueError("No peer connected")
-
- for _, wallet in self.service.wallet_state_manager.wallets.items():
- if WalletType(wallet.type()) == WalletType.DATA_LAYER:
- assert isinstance(wallet, DataLayerWallet)
- dl_wallet: DataLayerWallet = wallet
- break
- else:
- raise ValueError("No DataLayer wallet has been initialized")
+ dl_wallet = self.service.wallet_state_manager.get_dl_wallet()
async with self.service.wallet_state_manager.lock:
txs = await dl_wallet.delete_mirror(
bytes32.from_hexstr(request["coin_id"]),
- peer,
+ self.service.get_full_node_peer(),
fee=request.get("fee", uint64(0)),
)
for tx in txs:
@@ -3383,3 +3285,185 @@ async def dl_delete_mirror(self, request) -> Dict:
return {
"transactions": [tx.to_json_dict_convenience(self.service.config) for tx in txs],
}
+
+ ##########################################################################################
+ # Verified Credential
+ ##########################################################################################
+ async def vc_mint(self, request) -> Dict:
+ """
+ Mint a verified credential using the assigned DID
+ :param request: We require the 'did_id' of the DID that will mint the VC, plus an optional 'target_address' and
+ an optional 'fee' for the mint tx
+ :return: a 'vc_record' containing all the information of the soon-to-be-confirmed vc as well as any relevant
+ 'transactions'
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCMint(Streamable):
+ did_id: str
+ target_address: Optional[str] = None
+ fee: uint64 = uint64(0)
+
+ parsed_request = VCMint.from_json_dict(request)
+
+ did_id = decode_puzzle_hash(parsed_request.did_id)
+ puzhash: Optional[bytes32] = None
+ if parsed_request.target_address is not None:
+ puzhash = decode_puzzle_hash(parsed_request.target_address)
+
+ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet()
+ vc_record, tx_list = await vc_wallet.launch_new_vc(did_id, puzhash, parsed_request.fee)
+ for tx in tx_list:
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+ return {
+ "vc_record": vc_record.to_json_dict(),
+ "transactions": [tx.to_json_dict_convenience(self.service.config) for tx in tx_list],
+ }
+
+ async def vc_get(self, request) -> Dict:
+ """
+ Given a launcher ID get the verified credential
+ :param request: the 'vc_id' launcher id of a verifiable credential
+ :return: the 'vc_record' representing the specified verifiable credential
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCGet(Streamable):
+ vc_id: bytes32
+
+ parsed_request = VCGet.from_json_dict(request)
+
+ vc_record = await self.service.wallet_state_manager.vc_store.get_vc_record(parsed_request.vc_id)
+ return {"vc_record": vc_record}
+
+ async def vc_get_list(self, request) -> Dict:
+ """
+ Get a list of verified credentials
+ :param request: optional parameters for pagination 'start' and 'end'
+ :return: all 'vc_records' in the specified range and any 'proofs' associated with the roots contained within
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCGetList(Streamable):
+ start: uint32 = uint32(0)
+ end: uint32 = uint32(50)
+
+ parsed_request = VCGetList.from_json_dict(request)
+
+ vc_list = await self.service.wallet_state_manager.vc_store.get_vc_record_list(
+ parsed_request.start, parsed_request.end
+ )
+ return {
+ "vc_records": [{"coin_id": "0x" + vc.vc.coin.name().hex(), **vc.to_json_dict()} for vc in vc_list],
+ "proofs": {
+ rec.vc.proof_hash.hex(): None if fetched_proof is None else fetched_proof.key_value_pairs
+ for rec in vc_list
+ if rec.vc.proof_hash is not None
+ for fetched_proof in (
+ await self.service.wallet_state_manager.vc_store.get_proofs_for_root(rec.vc.proof_hash),
+ )
+ },
+ }
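The nested `for fetched_proof in (...)` clause above is a one-element-tuple trick used to bind the awaited store lookup once per record inside the dict comprehension. Written out imperatively inside the same handler, it is equivalent to the following sketch:

proofs = {}
for rec in vc_list:
    if rec.vc.proof_hash is None:
        continue
    # One store lookup per record, exactly what the inner tuple binds above.
    fetched_proof = await self.service.wallet_state_manager.vc_store.get_proofs_for_root(rec.vc.proof_hash)
    proofs[rec.vc.proof_hash.hex()] = None if fetched_proof is None else fetched_proof.key_value_pairs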
+
+ async def vc_spend(self, request) -> Dict:
+ """
+ Spend a verified credential
+ :param request: Required 'vc_id' launcher id of the vc we wish to spend. Optional parameters for a 'new_puzhash'
+ for the vc to end up at and 'new_proof_hash' & 'provider_inner_puzhash' which can be used to update the vc's
+ proofs. Also standard 'fee' & 'reuse_puzhash' parameters for the transaction.
+ :return: a list of all relevant 'transactions' to perform this spend
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCSpend(Streamable):
+ vc_id: bytes32
+ new_puzhash: Optional[bytes32] = None
+ new_proof_hash: Optional[bytes32] = None
+ provider_inner_puzhash: Optional[bytes32] = None
+ fee: uint64 = uint64(0)
+ reuse_puzhash: Optional[bool] = None
+
+ parsed_request = VCSpend.from_json_dict(request)
+
+ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet()
+ txs = await vc_wallet.generate_signed_transaction(
+ parsed_request.vc_id,
+ parsed_request.fee,
+ parsed_request.new_puzhash,
+ new_proof_hash=parsed_request.new_proof_hash,
+ provider_inner_puzhash=parsed_request.provider_inner_puzhash,
+ reuse_puzhash=parsed_request.reuse_puzhash,
+ )
+ for tx in txs:
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+
+ return {
+ "transactions": [tx.to_json_dict_convenience(self.service.config) for tx in txs],
+ }
+
+ async def vc_add_proofs(self, request) -> Dict:
+ """
+ Add a set of proofs to the DB that can be used when spending a VC. VCs are near useless until their proofs have
+ been added.
+ :param request: 'proofs' is a dictionary of key/value pairs
+ :return:
+ """
+ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet()
+
+ await vc_wallet.store.add_vc_proofs(VCProofs(request["proofs"]))
+
+ return {}
+
+ async def vc_get_proofs_for_root(self, request) -> Dict:
+ """
+ Given a specified vc root, get any proofs associated with that root.
+ :param request: must specify 'root' representing the tree hash of some set of proofs
+ :return: a dictionary of root hashes mapped to dictionaries of key value pairs of 'proofs'
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCGetProofsForRoot(Streamable):
+ root: bytes32
+
+ parsed_request = VCGetProofsForRoot.from_json_dict(request)
+ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet()
+
+ vc_proofs: Optional[VCProofs] = await vc_wallet.store.get_proofs_for_root(parsed_request.root)
+ if vc_proofs is None:
+ raise ValueError("no proofs found for specified root") # pragma: no cover
+ return {"proofs": vc_proofs.key_value_pairs}
+
+ async def vc_revoke(self, request) -> Dict:
+ """
+ Revoke an on chain VC provided the correct DID is available
+ :param request: required 'vc_parent_id' for the VC coin. Standard transaction params 'fee' & 'reuse_puzhash'.
+ :return: all relevant 'transactions'
+ """
+
+ @streamable
+ @dataclasses.dataclass(frozen=True)
+ class VCRevoke(Streamable):
+ vc_parent_id: bytes32
+ fee: uint64 = uint64(0)
+ reuse_puzhash: Optional[bool] = None
+
+ parsed_request = VCRevoke.from_json_dict(request)
+ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet()
+
+ txs = await vc_wallet.revoke_vc(
+ parsed_request.vc_parent_id,
+ self.service.get_full_node_peer(),
+ parsed_request.fee,
+ parsed_request.reuse_puzhash,
+ )
+ for tx in txs:
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+
+ return {
+ "transactions": [tx.to_json_dict_convenience(self.service.config) for tx in txs],
+ }
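Taken together with the client wrappers added below, the intended lifecycle is roughly mint, register proofs, spend to commit the proof root, and optionally revoke. A hedged sketch, assuming a connected `WalletRpcClient` named `client`, placeholder values for the DID launcher id, proof root, and provider inner puzzle hash, and that the returned record exposes the VC's `launcher_id`:

from chia.util.ints import uint64


async def vc_lifecycle_sketch(client, did_launcher_id, proof_root, provider_inner_puzhash):
    # 1. Mint a VC bound to the DID; returns the pending record plus its transactions.
    vc_record, _txs = await client.vc_mint(did_launcher_id, fee=uint64(10))
    # 2. Register the proof key/value pairs so their root can be resolved later.
    await client.vc_add_proofs({"example_proof": "1"})
    # 3. Spend the VC to commit proof_root (the tree hash of the proofs) on chain.
    await client.vc_spend(
        vc_record.vc.launcher_id,
        new_proof_hash=proof_root,
        provider_inner_puzhash=provider_inner_puzhash,
        fee=uint64(10),
    )
    # 4. The DID holder can later revoke via the VC coin's parent id:
    #    await client.vc_revoke(vc_parent_id, fee=uint64(10))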
diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py
--- a/chia/rpc/wallet_rpc_client.py
+++ b/chia/rpc/wallet_rpc_client.py
@@ -10,13 +10,16 @@
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
+from chia.util.bech32m import encode_puzzle_hash
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.notification_store import Notification
from chia.wallet.trade_record import TradeRecord
from chia.wallet.trading.offer import Offer
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.transaction_sorting import SortKey
+from chia.wallet.util.query_filter import TransactionTypeFilter
from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.vc_wallet.vc_store import VCRecord
def parse_result_transactions(result: Dict[str, Any]) -> Dict[str, Any]:
@@ -126,6 +129,7 @@ async def get_transactions(
sort_key: SortKey = None,
reverse: bool = False,
to_address: Optional[str] = None,
+ type_filter: Optional[TransactionTypeFilter] = None,
) -> List[TransactionRecord]:
request: Dict[str, Any] = {"wallet_id": wallet_id}
@@ -140,6 +144,9 @@ async def get_transactions(
if to_address is not None:
request["to_address"] = to_address
+ if type_filter is not None:
+ request["type_filter"] = type_filter.to_json_dict()
+
res = await self.fetch(
"get_transactions",
request,
@@ -421,6 +428,14 @@ async def get_did_id(self, wallet_id: int) -> Dict:
response = await self.fetch("did_get_did", request)
return response
+ async def get_did_info(self, coin_id: str, latest: bool) -> Dict:
+ request: Dict[str, Any] = {
+ "coin_id": coin_id,
+ "latest": latest,
+ }
+ response = await self.fetch("did_get_info", request)
+ return response
+
async def create_did_backup_file(self, wallet_id: int, filename: str) -> Dict:
request: Dict[str, Any] = {
"wallet_id": wallet_id,
@@ -452,6 +467,17 @@ async def get_did_recovery_list(self, wallet_id: int) -> Dict:
response = await self.fetch("did_get_recovery_list", request)
return response
+ async def did_message_spend(
+ self, wallet_id: int, puzzle_announcements: List[str], coin_announcements: List[str]
+ ) -> Dict:
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "coin_announcements": coin_announcements,
+ "puzzle_announcements": puzzle_announcements,
+ }
+ response = await self.fetch("did_message_spend", request)
+ return response
+
async def update_did_metadata(
self,
wallet_id: int,
@@ -473,6 +499,21 @@ async def get_did_metadata(self, wallet_id: int) -> Dict:
response = await self.fetch("did_get_metadata", request)
return response
+ async def find_lost_did(
+ self, coin_id: str, recovery_list_hash: Optional[str], metadata: Optional[Dict], num_verification: Optional[int]
+ ) -> Dict:
+ request: Dict[str, Any] = {
+ "coin_id": coin_id,
+ }
+ if recovery_list_hash is not None:
+ request["recovery_list_hash"] = recovery_list_hash
+ if metadata is not None:
+ request["metadata"] = (metadata,)
+ if num_verification is not None:
+ request["num_verification"] = num_verification
+ response = await self.fetch("did_find_lost_did", request)
+ return response
+
async def create_new_did_wallet_from_recovery(self, filename: str) -> Dict:
request: Dict[str, Any] = {
"wallet_type": "did_wallet",
@@ -914,7 +955,7 @@ async def count_nfts(self, wallet_id: Optional[int]):
return response
async def list_nfts(self, wallet_id):
- request: Dict[str, Any] = {"wallet_id": wallet_id}
+ request: Dict[str, Any] = {"wallet_id": wallet_id, "num": 100_000}
response = await self.fetch("nft_get_nfts", request)
return response
@@ -1125,3 +1166,68 @@ async def sign_message_by_address(self, address: str, message: str) -> Tuple[str
async def sign_message_by_id(self, id: str, message: str) -> Tuple[str, str, str]:
response = await self.fetch("sign_message_by_id", {"id": id, "message": message})
return response["pubkey"], response["signature"], response["signing_mode"]
+
+ async def vc_mint(
+ self, did_id: bytes32, target_address: Optional[bytes32] = None, fee: uint64 = uint64(0)
+ ) -> Tuple[VCRecord, List[TransactionRecord]]:
+ response = await self.fetch(
+ "vc_mint",
+ {
+ "did_id": encode_puzzle_hash(did_id, "rpc"),
+ "target_address": encode_puzzle_hash(target_address, "rpc") if target_address is not None else None,
+ "fee": fee,
+ },
+ )
+ return VCRecord.from_json_dict(response["vc_record"]), [
+ TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]
+ ]
+
+ async def vc_get(self, vc_id: bytes32) -> Optional[VCRecord]:
+ response = await self.fetch("vc_get", {"vc_id": vc_id.hex()})
+ return None if response["vc_record"] is None else VCRecord.from_json_dict(response["vc_record"])
+
+ async def vc_get_list(self, start: int = 0, count: int = 50) -> Tuple[List[VCRecord], Dict[str, Any]]:
+ response = await self.fetch("vc_get_list", {"start": start, "count": count})
+ return [VCRecord.from_json_dict(rec) for rec in response["vc_records"]], response["proofs"]
+
+ async def vc_spend(
+ self,
+ vc_id: bytes32,
+ new_puzhash: Optional[bytes32] = None,
+ new_proof_hash: Optional[bytes32] = None,
+ provider_inner_puzhash: Optional[bytes32] = None,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> List[TransactionRecord]:
+ response = await self.fetch(
+ "vc_spend",
+ {
+ "vc_id": vc_id.hex(),
+ "new_puzhash": new_puzhash.hex() if new_puzhash is not None else new_puzhash,
+ "new_proof_hash": new_proof_hash.hex() if new_proof_hash is not None else new_proof_hash,
+ "provider_inner_puzhash": provider_inner_puzhash.hex()
+ if provider_inner_puzhash is not None
+ else provider_inner_puzhash,
+ "fee": fee,
+ "reuse_puzhash": reuse_puzhash,
+ },
+ )
+ return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]]
+
+ async def vc_add_proofs(self, proofs: Dict[str, Any]) -> None:
+ await self.fetch("vc_add_proofs", {"proofs": proofs})
+
+ async def vc_get_proofs_for_root(self, root: bytes32) -> Dict[str, Any]:
+ response = await self.fetch("vc_get_proofs_for_root", {"root": root.hex()})
+ return response["proofs"]
+
+ async def vc_revoke(
+ self,
+ vc_parent_id: bytes32,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> List[TransactionRecord]:
+ response = await self.fetch(
+ "vc_revoke", {"vc_parent_id": vc_parent_id.hex(), "fee": fee, "reuse_puzhash": reuse_puzhash}
+ )
+ return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]]
diff --git a/chia/seeder/crawler.py b/chia/seeder/crawler.py
--- a/chia/seeder/crawler.py
+++ b/chia/seeder/crawler.py
@@ -23,7 +23,7 @@
from chia.server.ws_connection import WSChiaConnection
from chia.types.peer_info import PeerInfo
from chia.util.ints import uint32, uint64
-from chia.util.network import get_host_addr
+from chia.util.network import resolve
from chia.util.path import path_from_root
log = logging.getLogger(__name__)
@@ -127,9 +127,7 @@ async def peer_action(peer: WSChiaConnection):
try:
connected = await self.create_client(
- PeerInfo(
- str(get_host_addr(peer.ip_address, prefer_ipv6=self.config.get("prefer_ipv6", False))), peer.port
- ),
+ PeerInfo(await resolve(peer.ip_address, prefer_ipv6=self.config.get("prefer_ipv6", False)), peer.port),
peer_action,
)
if not connected:
diff --git a/chia/seeder/dns_server.py b/chia/seeder/dns_server.py
--- a/chia/seeder/dns_server.py
+++ b/chia/seeder/dns_server.py
@@ -208,7 +208,9 @@ async def dns_response(self, data):
}
qname = request.q.qname
- qn = str(qname)
+ # DNS labels are mixed case with DNS resolvers that implement the use of bit 0x20 to improve
+ # transaction identity. See https://datatracker.ietf.org/doc/html/draft-vixie-dnsext-dns0x20-00
+ qn = str(qname).lower()
qtype = request.q.qtype
qt = QTYPE[qtype]
if qn == D or qn.endswith("." + D):
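The lowercasing matters because a 0x20-randomizing resolver may send the query name with arbitrary case, so a naive string comparison against the configured domain would miss it. A tiny illustration with a placeholder domain:

D = "seeder.example.com."        # placeholder; the real domain comes from config
qn = "SeEdEr.ExAmPlE.CoM."       # what a 0x20-randomizing resolver might send
assert qn != D                   # exact comparison fails
assert qn.lower() == D           # normalized comparison matches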
diff --git a/chia/seeder/start_crawler.py b/chia/seeder/start_crawler.py
--- a/chia/seeder/start_crawler.py
+++ b/chia/seeder/start_crawler.py
@@ -54,7 +54,6 @@ def create_full_node_crawler_service(
advertised_port=service_config["port"],
service_name="full_node",
upnp_ports=[],
- server_listen_ports=[service_config["port"]],
on_connect_callback=crawler.on_connect,
network_id=network_id,
rpc_info=rpc_info,
diff --git a/chia/server/capabilities.py b/chia/server/capabilities.py
--- a/chia/server/capabilities.py
+++ b/chia/server/capabilities.py
@@ -5,22 +5,20 @@
from chia.protocols.shared_protocol import Capability
from chia.util.ints import uint16
-_capability_values = {int(capability) for capability in Capability}
-
def known_active_capabilities(values: Iterable[Tuple[uint16, str]]) -> List[Capability]:
# NOTE: order is not guaranteed
# TODO: what if there's a claim for both supporting and not?
# presently it considers it supported
- filtered: Set[uint16] = set()
+ filtered: Set[Capability] = set()
for value, state in values:
if state != "1":
continue
- if value not in _capability_values:
- continue
-
- filtered.add(value)
+ try:
+ filtered.add(Capability(value))
+ except ValueError:
+ pass
# TODO: consider changing all uses to sets instead of lists
- return [Capability(value) for value in filtered]
+ return list(filtered)
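The net effect of this rewrite is that unknown capability values are dropped by letting the `Capability` constructor raise, and duplicates collapse into the set. A quick sketch, assuming `Capability.BASE` has value 1 as in the shared protocol:

from chia.protocols.shared_protocol import Capability
from chia.server.capabilities import known_active_capabilities
from chia.util.ints import uint16

caps = known_active_capabilities(
    [
        (uint16(1), "1"),     # known and enabled -> kept
        (uint16(1), "1"),     # duplicate -> collapsed by the set
        (uint16(9999), "1"),  # unknown value -> ValueError swallowed, dropped
        (uint16(2), "0"),     # disabled -> skipped
    ]
)
assert caps == [Capability.BASE]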
diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py
--- a/chia/server/node_discovery.py
+++ b/chia/server/node_discovery.py
@@ -22,10 +22,10 @@
from chia.server.peer_store_resolver import PeerStoreResolver
from chia.server.server import ChiaServer
from chia.server.ws_connection import WSChiaConnection
-from chia.types.peer_info import PeerInfo, TimestampedPeerInfo
+from chia.types.peer_info import PeerInfo, TimestampedPeerInfo, UnresolvedPeerInfo
from chia.util.hash import std_hash
from chia.util.ints import uint16, uint64
-from chia.util.network import IPAddress, get_host_addr
+from chia.util.network import IPAddress, resolve
MAX_PEERS_RECEIVED_PER_REQUEST = 1000
MAX_TOTAL_PEERS_RECEIVED = 3000
@@ -61,15 +61,9 @@ def __init__(
self.peers_file_path = peer_store_resolver.peers_file_path
self.dns_servers = dns_servers
random.shuffle(dns_servers) # Don't always start with the same DNS server
+ self.introducer_info: Optional[UnresolvedPeerInfo] = None
if introducer_info is not None:
- # get_host_addr is blocking but this only gets called on startup or in the wallet after disconnecting from
- # all trusted peers.
- self.introducer_info: Optional[PeerInfo] = PeerInfo(
- str(get_host_addr(introducer_info["host"], prefer_ipv6=False)),
- introducer_info["port"],
- )
- else:
- self.introducer_info = None
+ self.introducer_info = UnresolvedPeerInfo(introducer_info["host"], introducer_info["port"])
self.peer_connect_interval = peer_connect_interval
self.log = log
self.relay_queue: Optional[asyncio.Queue[Tuple[TimestampedPeerInfo, int]]] = None
@@ -152,7 +146,7 @@ async def on_connect(self, peer: WSChiaConnection) -> None:
and self.address_manager is not None
):
timestamped_peer_info = TimestampedPeerInfo(
- peer.peer_host,
+ peer.peer_info.host,
peer.peer_server_port,
uint64(int(time.time())),
)
@@ -212,7 +206,9 @@ async def on_connect(peer: WSChiaConnection) -> None:
msg = make_msg(ProtocolMessageTypes.request_peers_introducer, RequestPeersIntroducer())
await peer.send_message(msg)
- await self.server.start_client(self.introducer_info, on_connect)
+ await self.server.start_client(
+ PeerInfo(await resolve(self.introducer_info.host, prefer_ipv6=False), self.introducer_info.port), on_connect
+ )
async def _query_dns(self, dns_address: str) -> None:
try:
diff --git a/chia/server/reconnect_task.py b/chia/server/reconnect_task.py
deleted file mode 100644
--- a/chia/server/reconnect_task.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-from logging import Logger
-
-from chia.server.server import ChiaServer
-from chia.types.peer_info import PeerInfo
-
-
-def start_reconnect_task(server: ChiaServer, peer_info: PeerInfo, log: Logger) -> asyncio.Task[None]:
- """
- Start a background task that checks connection and reconnects periodically to a peer.
- """
-
- async def connection_check() -> None:
- while True:
- peer_retry = True
- for _, connection in server.all_connections.items():
- if connection.get_peer_info() == peer_info:
- peer_retry = False
- if peer_retry:
- log.info(f"Reconnecting to peer {peer_info}")
- try:
- await server.start_client(peer_info, None)
- except Exception as e:
- log.info(f"Failed to connect to {peer_info} {e}")
- await asyncio.sleep(3)
-
- return asyncio.create_task(connection_check())
diff --git a/chia/server/server.py b/chia/server/server.py
--- a/chia/server/server.py
+++ b/chia/server/server.py
@@ -36,7 +36,7 @@
from chia.types.peer_info import PeerInfo
from chia.util.errors import Err, ProtocolError
from chia.util.ints import uint16
-from chia.util.network import WebServer, is_in_network, is_localhost
+from chia.util.network import WebServer, is_in_network, is_localhost, is_trusted_peer
from chia.util.ssl_check import verify_ssl_certs_and_keys
max_message_size = 50 * 1024 * 1024 # 50MB
@@ -45,14 +45,14 @@
def ssl_context_for_server(
ca_cert: Path,
ca_key: Path,
- private_cert_path: Path,
- private_key_path: Path,
+ cert_path: Path,
+ key_path: Path,
*,
check_permissions: bool = True,
log: Optional[logging.Logger] = None,
) -> ssl.SSLContext:
if check_permissions:
- verify_ssl_certs_and_keys([ca_cert, private_cert_path], [ca_key, private_key_path], log)
+ verify_ssl_certs_and_keys([ca_cert, cert_path], [ca_key, key_path], log)
ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.CLIENT_AUTH, cafile=str(ca_cert))
ssl_context.check_hostname = False
@@ -71,7 +71,7 @@ def ssl_context_for_server(
"ECDHE-RSA-AES128-SHA256"
)
)
- ssl_context.load_cert_chain(certfile=str(private_cert_path), keyfile=str(private_key_path))
+ ssl_context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
ssl_context.verify_mode = ssl.CERT_REQUIRED
return ssl_context
@@ -89,18 +89,18 @@ def ssl_context_for_root(
def ssl_context_for_client(
ca_cert: Path,
ca_key: Path,
- private_cert_path: Path,
- private_key_path: Path,
+ cert_path: Path,
+ key_path: Path,
*,
check_permissions: bool = True,
log: Optional[logging.Logger] = None,
) -> ssl.SSLContext:
if check_permissions:
- verify_ssl_certs_and_keys([ca_cert, private_cert_path], [ca_key, private_key_path], log)
+ verify_ssl_certs_and_keys([ca_cert, cert_path], [ca_key, key_path], log)
ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=str(ca_cert))
ssl_context.check_hostname = False
- ssl_context.load_cert_chain(certfile=str(private_cert_path), keyfile=str(private_key_path))
+ ssl_context.load_cert_chain(certfile=str(cert_path), keyfile=str(key_path))
ssl_context.verify_mode = ssl.CERT_REQUIRED
return ssl_context
@@ -176,30 +176,40 @@ def create(
# Authenticated clients
private_cert_path, private_key_path = private_ssl_paths(root_path, config)
ssl_client_context = ssl_context_for_client(
- ca_private_crt_path, ca_private_key_path, private_cert_path, private_key_path
+ ca_cert=ca_private_crt_path,
+ ca_key=ca_private_key_path,
+ cert_path=private_cert_path,
+ key_path=private_key_path,
)
else:
# Public clients
public_cert_path, public_key_path = public_ssl_paths(root_path, config)
ssl_client_context = ssl_context_for_client(
- chia_ca_crt_path, chia_ca_key_path, public_cert_path, public_key_path
+ ca_cert=chia_ca_crt_path,
+ ca_key=chia_ca_key_path,
+ cert_path=public_cert_path,
+ key_path=public_key_path,
)
if local_type in authenticated_server_types:
# Authenticated servers
private_cert_path, private_key_path = private_ssl_paths(root_path, config)
ssl_context = ssl_context_for_server(
- ca_private_crt_path,
- ca_private_key_path,
- private_cert_path,
- private_key_path,
+ ca_cert=ca_private_crt_path,
+ ca_key=ca_private_key_path,
+ cert_path=private_cert_path,
+ key_path=private_key_path,
log=log,
)
else:
# Public servers
public_cert_path, public_key_path = public_ssl_paths(root_path, config)
ssl_context = ssl_context_for_server(
- chia_ca_crt_path, chia_ca_key_path, public_cert_path, public_key_path, log=log
+ ca_cert=chia_ca_crt_path,
+ ca_key=chia_ca_key_path,
+ cert_path=public_cert_path,
+ key_path=public_key_path,
+ log=log,
)
node_id_cert_path = private_cert_path if public_cert_path is None else public_cert_path
@@ -250,7 +260,7 @@ async def garbage_collect_connections_task(self) -> None:
if time.time() - connection.last_message_time > 1800:
to_remove.append(connection)
for connection in to_remove:
- self.log.debug(f"Garbage collecting connection {connection.peer_host} due to inactivity")
+ self.log.debug(f"Garbage collecting connection {connection.peer_info.host} due to inactivity")
if connection.closed:
self.all_connections.pop(connection.peer_node_id)
else:
@@ -264,26 +274,29 @@ async def garbage_collect_connections_task(self) -> None:
for peer_ip in to_remove_ban:
del self.banned_peers[peer_ip]
- async def start_server(self, prefer_ipv6: bool, on_connect: Optional[ConnectionCallback] = None) -> None:
+ async def start(
+ self,
+ listen: bool,
+ prefer_ipv6: bool,
+ on_connect: Optional[ConnectionCallback] = None,
+ ) -> None:
if self.webserver is not None:
raise RuntimeError("ChiaServer already started")
if self.gc_task is None:
self.gc_task = asyncio.create_task(self.garbage_collect_connections_task())
- if self._local_type in [NodeType.WALLET, NodeType.HARVESTER, NodeType.TIMELORD]:
- return None
-
- self.on_connect = on_connect
- self.webserver = await WebServer.create(
- hostname="",
- port=uint16(self._port),
- routes=[web.get("/ws", self.incoming_connection)],
- ssl_context=self.ssl_context,
- prefer_ipv6=prefer_ipv6,
- logger=self.log,
- )
- self._port = int(self.webserver.listen_port)
- self.log.info(f"Started listening on port: {self._port}")
+ if listen:
+ self.on_connect = on_connect
+ self.webserver = await WebServer.create(
+ hostname="",
+ port=uint16(self._port),
+ routes=[web.get("/ws", self.incoming_connection)],
+ ssl_context=self.ssl_context,
+ prefer_ipv6=prefer_ipv6,
+ logger=self.log,
+ )
+ self._port = int(self.webserver.listen_port)
+ self.log.info(f"Started listening on port: {self._port}")
async def incoming_connection(self, request: web.Request) -> web.StreamResponse:
if getattr(self.node, "crawl", None) is not None:
@@ -309,26 +322,25 @@ async def incoming_connection(self, request: web.Request) -> web.StreamResponse:
connection: Optional[WSChiaConnection] = None
try:
connection = WSChiaConnection.create(
- self._local_type,
- ws,
- self.api,
- self._port,
- self.log,
- False,
- self.received_message_callback,
- request.remote,
- self.connection_closed,
- peer_id,
- self._inbound_rate_limit_percent,
- self._outbound_rate_limit_percent,
- self._local_capabilities_for_handshake,
+ local_type=self._local_type,
+ ws=ws,
+ api=self.api,
+ server_port=self._port,
+ log=self.log,
+ is_outbound=False,
+ received_message_callback=self.received_message_callback,
+ close_callback=self.connection_closed,
+ peer_id=peer_id,
+ inbound_rate_limit_percent=self._inbound_rate_limit_percent,
+ outbound_rate_limit_percent=self._outbound_rate_limit_percent,
+ local_capabilities_for_handshake=self._local_capabilities_for_handshake,
)
await connection.perform_handshake(self._network_id, protocol_version, self._port, self._local_type)
assert connection.connection_type is not None, "handshake failed to set connection type, still None"
# Limit inbound connections to config's specifications.
if not self.accept_inbound_connections(connection.connection_type) and not is_in_network(
- connection.peer_host, self.exempt_peer_networks
+ connection.peer_info.host, self.exempt_peer_networks
):
self.log.info(
f"Not accepting inbound connection: {connection.get_peer_logging()}.Inbound limit reached."
@@ -369,7 +381,7 @@ async def connection_added(
# If we already had a connection to this peer_id, close the old one. This is secure because peer_ids are based
# on TLS public keys
if connection.closed:
- self.log.debug(f"ignoring unexpected request to add closed connection {connection.peer_host} ")
+ self.log.debug(f"ignoring unexpected request to add closed connection {connection.peer_info.host} ")
return
if connection.peer_node_id in self.all_connections:
@@ -388,7 +400,7 @@ def is_duplicate_or_self_connection(self, target_node: PeerInfo) -> bool:
self.log.debug(f"Not connecting to {target_node}")
return True
for connection in self.all_connections.values():
- if connection.peer_host == target_node.host and connection.peer_server_port == target_node.port:
+ if connection.peer_info.host == target_node.host and connection.peer_server_port == target_node.port:
self.log.debug(f"Not connecting to {target_node}, duplicate connection")
return True
return False
@@ -450,22 +462,22 @@ async def start_client(
der_cert = x509.load_der_x509_certificate(cert_bytes, default_backend())
peer_id = bytes32(der_cert.fingerprint(hashes.SHA256()))
if peer_id == self.node_id:
- raise RuntimeError(f"Trying to connect to a peer ({target_node}) with the same peer_id: {peer_id}")
+ self.log.info(f"Connected to a node with the same peer ID, disconnecting: {target_node} {peer_id}")
+ return False
connection = WSChiaConnection.create(
- self._local_type,
- ws,
- self.api,
- self._port,
- self.log,
- True,
- self.received_message_callback,
- target_node.host,
- self.connection_closed,
- peer_id,
- self._inbound_rate_limit_percent,
- self._outbound_rate_limit_percent,
- self._local_capabilities_for_handshake,
+ local_type=self._local_type,
+ ws=ws,
+ api=self.api,
+ server_port=self._port,
+ log=self.log,
+ is_outbound=True,
+ received_message_callback=self.received_message_callback,
+ close_callback=self.connection_closed,
+ peer_id=peer_id,
+ inbound_rate_limit_percent=self._inbound_rate_limit_percent,
+ outbound_rate_limit_percent=self._outbound_rate_limit_percent,
+ local_capabilities_for_handshake=self._local_capabilities_for_handshake,
session=session,
)
await connection.perform_handshake(self._network_id, protocol_version, self._port, self._local_type)
@@ -510,28 +522,29 @@ def connection_closed(self, connection: WSChiaConnection, ban_time: int, closed_
# in this case we still want to do the banning logic and remove the connection from the list
# but the other cleanup should already have been done so we skip that
- if is_localhost(connection.peer_host) and ban_time != 0:
+ if is_localhost(connection.peer_info.host) and ban_time != 0:
self.log.warning(f"Trying to ban localhost for {ban_time}, but will not ban")
ban_time = 0
if ban_time > 0:
ban_until: float = time.time() + ban_time
- self.log.warning(f"Banning {connection.peer_host} for {ban_time} seconds")
- if connection.peer_host in self.banned_peers:
- if ban_until > self.banned_peers[connection.peer_host]:
- self.banned_peers[connection.peer_host] = ban_until
+ self.log.warning(f"Banning {connection.peer_info.host} for {ban_time} seconds")
+ if connection.peer_info.host in self.banned_peers:
+ if ban_until > self.banned_peers[connection.peer_info.host]:
+ self.banned_peers[connection.peer_info.host] = ban_until
else:
- self.banned_peers[connection.peer_host] = ban_until
+ self.banned_peers[connection.peer_info.host] = ban_until
- if connection.peer_node_id in self.all_connections:
+ present_connection = self.all_connections.get(connection.peer_node_id)
+ if present_connection is connection:
self.all_connections.pop(connection.peer_node_id)
if not closed_connection:
- self.log.info(f"Connection closed: {connection.peer_host}, node id: {connection.peer_node_id}")
+ self.log.info(f"Connection closed: {connection.peer_info.host}, node id: {connection.peer_node_id}")
if connection.connection_type is None:
# This means the handshake was never finished with this peer
self.log.debug(
- f"Invalid connection type for connection {connection.peer_host},"
+ f"Invalid connection type for connection {connection.peer_info.host},"
f" while closing. Handshake never finished."
)
connection.cancel_tasks()
@@ -560,9 +573,9 @@ async def validate_broadcast_message_type(self, messages: List[Message], node_ty
for _, connection in self.all_connections.items():
if connection.connection_type is node_type:
await connection.close(
- self.invalid_protocol_ban_seconds,
- WSCloseCode.INTERNAL_ERROR,
- Err.INTERNAL_PROTOCOL_ERROR,
+ ban_time=self.invalid_protocol_ban_seconds,
+ ws_close_code=WSCloseCode.INTERNAL_ERROR,
+ error=Err.INTERNAL_PROTOCOL_ERROR,
)
raise ProtocolError(Err.INTERNAL_PROTOCOL_ERROR, [message.type])
@@ -674,14 +687,12 @@ def accept_inbound_connections(self, node_type: NodeType) -> bool:
return True
def is_trusted_peer(self, peer: WSChiaConnection, trusted_peers: Dict[str, Any]) -> bool:
- if trusted_peers is None:
- return False
- if not self.config.get("testing", False) and peer.peer_host == "127.0.0.1":
- return True
- if peer.peer_node_id.hex() not in trusted_peers:
- return False
-
- return True
+ return is_trusted_peer(
+ host=peer.peer_info.host,
+ node_id=peer.peer_node_id,
+ trusted_peers=trusted_peers,
+ testing=self.config.get("testing", False),
+ )
def set_capabilities(self, capabilities: List[Tuple[uint16, str]]) -> None:
self._local_capabilities_for_handshake = capabilities
diff --git a/chia/server/start_data_layer.py b/chia/server/start_data_layer.py
--- a/chia/server/start_data_layer.py
+++ b/chia/server/start_data_layer.py
@@ -3,7 +3,7 @@
import logging
import pathlib
import sys
-from typing import Any, Dict, Optional, cast
+from typing import Any, Dict, List, Optional, cast
from chia.data_layer.data_layer import DataLayer
from chia.data_layer.data_layer_api import DataLayerAPI
@@ -29,9 +29,15 @@
def create_data_layer_service(
root_path: pathlib.Path,
config: Dict[str, Any],
+ downloaders: List[str],
+ uploaders: List[str], # don't add FilesystemUploader to this, it is the default uploader
wallet_service: Optional[Service[WalletNode]] = None,
connect_to_daemon: bool = True,
) -> Service[DataLayer]:
+ if uploaders is None:
+ uploaders = []
+ if downloaders is None:
+ downloaders = []
service_config = config[SERVICE_NAME]
self_hostname = config["self_hostname"]
wallet_rpc_port = service_config["wallet_peer"]["port"]
@@ -42,7 +48,14 @@ def create_data_layer_service(
wallet_root_path = wallet_service.root_path
wallet_config = wallet_service.config
wallet_rpc_init = WalletRpcClient.create(self_hostname, uint16(wallet_rpc_port), wallet_root_path, wallet_config)
- data_layer = DataLayer(config=service_config, root_path=root_path, wallet_rpc_init=wallet_rpc_init)
+
+ data_layer = DataLayer(
+ config=service_config,
+ root_path=root_path,
+ wallet_rpc_init=wallet_rpc_init,
+ downloaders=downloaders,
+ uploaders=uploaders,
+ )
api = DataLayerAPI(data_layer)
network_id = service_config["selected_network"]
rpc_port = service_config.get("rpc_port")
@@ -51,7 +64,6 @@ def create_data_layer_service(
rpc_info = (DataLayerRpcApi, cast(int, service_config["rpc_port"]))
return Service(
- server_listen_ports=[service_config["port"]],
root_path=root_path,
config=config,
node=data_layer,
@@ -65,6 +77,7 @@ def create_data_layer_service(
max_request_body_size=service_config.get("rpc_server_max_request_body_size", 26214400),
rpc_info=rpc_info,
connect_to_daemon=connect_to_daemon,
+ listen=False,
)
@@ -86,7 +99,9 @@ async def async_main() -> int:
overwrite=False,
)
- service = create_data_layer_service(DEFAULT_ROOT_PATH, config)
+ uploaders: List[str] = config["data_layer"].get("uploaders", [])
+ downloaders: List[str] = config["data_layer"].get("downloaders", [])
+ service = create_data_layer_service(DEFAULT_ROOT_PATH, config, downloaders, uploaders)
await service.setup_process_global_state()
await service.run()
diff --git a/chia/server/start_farmer.py b/chia/server/start_farmer.py
--- a/chia/server/start_farmer.py
+++ b/chia/server/start_farmer.py
@@ -11,12 +11,11 @@
from chia.rpc.farmer_rpc_api import FarmerRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.keychain import Keychain
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -34,12 +33,8 @@ def create_farmer_service(
) -> Service[Farmer]:
service_config = config[SERVICE_NAME]
- connect_peers = []
fnp = service_config.get("full_node_peer")
- if fnp is not None:
- connect_peers.append(
- PeerInfo(str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"])
- )
+ connect_peers = set() if fnp is None else {UnresolvedPeerInfo(fnp["host"], fnp["port"])}
overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
updated_constants = consensus_constants.replace_str_to_bytes(**overrides)
@@ -60,7 +55,6 @@ def create_farmer_service(
node_type=NodeType.FARMER,
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
- server_listen_ports=[service_config["port"]],
connect_peers=connect_peers,
on_connect_callback=farmer.on_connect,
network_id=network_id,
diff --git a/chia/server/start_full_node.py b/chia/server/start_full_node.py
--- a/chia/server/start_full_node.py
+++ b/chia/server/start_full_node.py
@@ -59,7 +59,6 @@ def create_full_node_service(
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
upnp_ports=upnp_list,
- server_listen_ports=[service_config["port"]],
on_connect_callback=full_node.on_connect,
network_id=network_id,
rpc_info=rpc_info,
@@ -72,7 +71,12 @@ async def async_main(service_config: Dict[str, Any]) -> int:
# TODO: refactor to avoid the double load
config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
config[SERVICE_NAME] = service_config
- overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
+ network_id = service_config["selected_network"]
+ overrides = service_config["network_overrides"]["constants"][network_id]
+ if network_id == "testnet10":
+ # activate softforks immediately on testnet
+ if "SOFT_FORK2_HEIGHT" not in overrides:
+ overrides["SOFT_FORK2_HEIGHT"] = 0
updated_constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
initialize_service_logging(service_name=SERVICE_NAME, config=config)
service = create_full_node_service(DEFAULT_ROOT_PATH, config, updated_constants)
diff --git a/chia/server/start_harvester.py b/chia/server/start_harvester.py
--- a/chia/server/start_harvester.py
+++ b/chia/server/start_harvester.py
@@ -11,11 +11,10 @@
from chia.rpc.harvester_rpc_api import HarvesterRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -27,7 +26,7 @@ def create_harvester_service(
root_path: pathlib.Path,
config: Dict[str, Any],
consensus_constants: ConsensusConstants,
- farmer_peer: Optional[PeerInfo],
+ farmer_peer: Optional[UnresolvedPeerInfo],
connect_to_daemon: bool = True,
) -> Service[Harvester]:
service_config = config[SERVICE_NAME]
@@ -49,11 +48,11 @@ def create_harvester_service(
node_type=NodeType.HARVESTER,
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
- server_listen_ports=[service_config["port"]],
- connect_peers=[] if farmer_peer is None else [farmer_peer],
+ connect_peers=set() if farmer_peer is None else {farmer_peer},
network_id=network_id,
rpc_info=rpc_info,
connect_to_daemon=connect_to_daemon,
+ listen=False,
)
@@ -63,9 +62,7 @@ async def async_main() -> int:
service_config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
config[SERVICE_NAME] = service_config
initialize_service_logging(service_name=SERVICE_NAME, config=config)
- farmer_peer = PeerInfo(
- str(get_host_addr(service_config["farmer_peer"]["host"])), service_config["farmer_peer"]["port"]
- )
+ farmer_peer = UnresolvedPeerInfo(service_config["farmer_peer"]["host"], service_config["farmer_peer"]["port"])
service = create_harvester_service(DEFAULT_ROOT_PATH, config, DEFAULT_CONSTANTS, farmer_peer)
await service.setup_process_global_state()
await service.run()
diff --git a/chia/server/start_introducer.py b/chia/server/start_introducer.py
--- a/chia/server/start_introducer.py
+++ b/chia/server/start_introducer.py
@@ -39,7 +39,6 @@ def create_introducer_service(
peer_api=node__api,
node_type=NodeType.INTRODUCER,
service_name=SERVICE_NAME,
- server_listen_ports=[service_config["port"]],
network_id=network_id,
advertised_port=advertised_port,
connect_to_daemon=connect_to_daemon,
diff --git a/chia/server/start_service.py b/chia/server/start_service.py
--- a/chia/server/start_service.py
+++ b/chia/server/start_service.py
@@ -9,7 +9,7 @@
import sys
from pathlib import Path
from types import FrameType
-from typing import Any, Awaitable, Callable, Coroutine, Dict, Generic, List, Optional, Tuple, Type, TypeVar
+from typing import Any, Awaitable, Callable, Coroutine, Dict, Generic, List, Optional, Set, Tuple, Type, TypeVar
from chia.cmds.init_funcs import chia_full_version_str
from chia.daemon.server import service_launch_lock_path
@@ -20,13 +20,13 @@
from chia.server.ssl_context import chia_ssl_ca_paths, private_ssl_ca_paths
from chia.server.upnp import UPnP
from chia.server.ws_connection import WSChiaConnection
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo
from chia.util.ints import uint16
from chia.util.lock import Lockfile, LockfileError
+from chia.util.network import resolve
from chia.util.setproctitle import setproctitle
from ..protocols.shared_protocol import capabilities
-from .reconnect_task import start_reconnect_task
# this is used to detect whether we are running in the main process or not, in
# signal handlers. We need to ignore signals in the sub processes.
@@ -55,13 +55,13 @@ def __init__(
*,
config: Dict[str, Any],
upnp_ports: List[int] = [],
- server_listen_ports: List[int] = [],
- connect_peers: List[PeerInfo] = [],
+ connect_peers: Set[UnresolvedPeerInfo] = set(),
on_connect_callback: Optional[Callable[[WSChiaConnection], Awaitable[None]]] = None,
rpc_info: Optional[RpcInfo] = None,
connect_to_daemon: bool = True,
max_request_body_size: Optional[int] = None,
override_capabilities: Optional[List[Tuple[uint16, str]]] = None,
+ listen: bool = True,
) -> None:
self.root_path = root_path
self.config = config
@@ -76,6 +76,8 @@ def __init__(
self._rpc_close_task: Optional[asyncio.Task[None]] = None
self._network_id: str = network_id
self.max_request_body_size = max_request_body_size
+ self._listen = listen
+ self.reconnect_retry_seconds: int = 3
self._log = logging.getLogger(service_name)
self._log.info(f"Starting service {self._service_name} ...")
@@ -119,7 +121,6 @@ def __init__(
self._log.warning(f"No set_server method for {service_name}")
self._upnp_ports = upnp_ports
- self._server_listen_ports = server_listen_ports
self._api = peer_api
self._node = node
@@ -129,9 +130,44 @@ def __init__(
self._on_connect_callback = on_connect_callback
self._advertised_port = advertised_port
- self._reconnect_tasks: Dict[PeerInfo, Optional[asyncio.Task[None]]] = {peer: None for peer in connect_peers}
+ self._connect_peers = connect_peers
+ self._connect_peers_task: Optional[asyncio.Task[None]] = None
self.upnp: UPnP = UPnP()
+ async def _connect_peers_task_handler(self) -> None:
+ resolved_peers: Dict[UnresolvedPeerInfo, PeerInfo] = {}
+ prefer_ipv6 = self.config.get("prefer_ipv6", False)
+ while True:
+ for unresolved in self._connect_peers:
+ resolved = resolved_peers.get(unresolved)
+ if resolved is None:
+ try:
+ resolved = PeerInfo(await resolve(unresolved.host, prefer_ipv6=prefer_ipv6), unresolved.port)
+ except Exception as e:
+ self._log.warning(f"Failed to resolve {unresolved.host}: {e}")
+ continue
+ self._log.info(f"Add resolved {resolved}")
+ resolved_peers[unresolved] = resolved
+
+ if any(connection.peer_info == resolved for connection in self._server.all_connections.values()):
+ continue
+
+ if not await self._server.start_client(resolved, None):
+ self._log.info(f"Failed to connect to {resolved}")
+ # Re-resolve to make sure the IP didn't change; this helps, for example, to keep dyndns hostnames
+ # up to date.
+ try:
+ resolved_new = PeerInfo(
+ await resolve(unresolved.host, prefer_ipv6=prefer_ipv6), unresolved.port
+ )
+ except Exception as e:
+ self._log.warning(f"Failed to resolve after connection failure {unresolved.host}: {e}")
+ continue
+ if resolved_new != resolved:
+ self._log.info(f"Host {unresolved.host} changed from {resolved} to {resolved_new}")
+ resolved_peers[unresolved] = resolved_new
+ await asyncio.sleep(self.reconnect_retry_seconds)
+
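With this change `add_peer` no longer spawns a reconnect task per peer; it only registers an `UnresolvedPeerInfo` that the single loop above resolves, connects, and re-resolves every `reconnect_retry_seconds`. A short usage sketch, assuming `service` is an already-constructed `Service`:

from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.ints import uint16

# The hostname is re-resolved by the background loop, so dynamic-DNS names stay usable.
service.add_peer(UnresolvedPeerInfo("node.example.com", uint16(8444)))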
async def start(self) -> None:
# TODO: move those parameters to `__init__`
if self._did_start:
@@ -151,11 +187,14 @@ async def start(self) -> None:
for port in self._upnp_ports:
self.upnp.remap(port)
- await self._server.start_server(self.config.get("prefer_ipv6", False), self._on_connect_callback)
+ await self._server.start(
+ listen=self._listen,
+ prefer_ipv6=self.config.get("prefer_ipv6", False),
+ on_connect=self._on_connect_callback,
+ )
self._advertised_port = self._server.get_port()
- for peer in self._reconnect_tasks.keys():
- self.add_peer(peer)
+ self._connect_peers_task = asyncio.create_task(self._connect_peers_task_handler())
self._log.info(
f"Started {self._service_name} service on network_id: {self._network_id} "
@@ -186,11 +225,8 @@ async def run(self) -> None:
self._log.error(f"{self._service_name}: already running")
raise ValueError(f"{self._service_name}: already running") from e
- def add_peer(self, peer: PeerInfo) -> None:
- if self._reconnect_tasks.get(peer) is not None:
- raise ServiceException(f"Peer {peer} already added")
-
- self._reconnect_tasks[peer] = start_reconnect_task(self._server, peer, self._log)
+ def add_peer(self, peer: UnresolvedPeerInfo) -> None:
+ self._connect_peers.add(peer)
async def setup_process_global_state(self) -> None:
# Being async forces this to be run from within an active event loop as is
@@ -238,10 +274,8 @@ def stop(self) -> None:
self.upnp.release(port)
self._log.info("Cancelling reconnect task")
- for task in self._reconnect_tasks.values():
- if task is not None:
- task.cancel()
- self._reconnect_tasks.clear()
+ if self._connect_peers_task is not None:
+ self._connect_peers_task.cancel()
self._log.info("Closing connections")
self._server.close_all()
self._node._close()
diff --git a/chia/server/start_timelord.py b/chia/server/start_timelord.py
--- a/chia/server/start_timelord.py
+++ b/chia/server/start_timelord.py
@@ -12,11 +12,10 @@
from chia.server.start_service import RpcInfo, Service, async_run
from chia.timelord.timelord import Timelord
from chia.timelord.timelord_api import TimelordAPI
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
-from chia.util.network import get_host_addr
# See: https://bugs.python.org/issue29288
"".encode("idna")
@@ -35,9 +34,9 @@ def create_timelord_service(
) -> Service[Timelord]:
service_config = config[SERVICE_NAME]
- connect_peers = [
- PeerInfo(str(get_host_addr(service_config["full_node_peer"]["host"])), service_config["full_node_peer"]["port"])
- ]
+ connect_peers = {
+ UnresolvedPeerInfo(service_config["full_node_peer"]["host"], service_config["full_node_peer"]["port"])
+ }
overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
updated_constants = constants.replace_str_to_bytes(**overrides)
@@ -57,11 +56,11 @@ def create_timelord_service(
node_type=NodeType.TIMELORD,
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
- server_listen_ports=[service_config["port"]],
connect_peers=connect_peers,
network_id=network_id,
rpc_info=rpc_info,
connect_to_daemon=connect_to_daemon,
+ listen=False,
)
diff --git a/chia/server/start_wallet.py b/chia/server/start_wallet.py
--- a/chia/server/start_wallet.py
+++ b/chia/server/start_wallet.py
@@ -11,12 +11,11 @@
from chia.rpc.wallet_rpc_api import WalletRpcApi
from chia.server.outbound_message import NodeType
from chia.server.start_service import RpcInfo, Service, async_run
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.chia_logging import initialize_service_logging
from chia.util.config import load_config, load_config_cli
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.keychain import Keychain
-from chia.util.network import get_host_addr
from chia.util.task_timing import maybe_manage_task_instrumentation
from chia.wallet.wallet_node import WalletNode
@@ -50,13 +49,8 @@ def create_wallet_service(
)
peer_api = WalletNodeAPI(node)
fnp = service_config.get("full_node_peer")
+ connect_peers = set() if fnp is None else {UnresolvedPeerInfo(fnp["host"], fnp["port"])}
- if fnp:
- connect_peers = [
- PeerInfo(str(get_host_addr(fnp["host"], prefer_ipv6=config.get("prefer_ipv6", False))), fnp["port"])
- ]
- else:
- connect_peers = []
network_id = service_config["selected_network"]
rpc_port = service_config.get("rpc_port")
rpc_info: Optional[RpcInfo] = None
@@ -64,11 +58,11 @@ def create_wallet_service(
rpc_info = (WalletRpcApi, service_config["rpc_port"])
return Service(
- server_listen_ports=[service_config["port"]],
root_path=root_path,
config=config,
node=node,
peer_api=peer_api,
+ listen=False,
node_type=NodeType.WALLET,
service_name=SERVICE_NAME,
on_connect_callback=node.on_connect,
diff --git a/chia/server/ws_connection.py b/chia/server/ws_connection.py
--- a/chia/server/ws_connection.py
+++ b/chia/server/ws_connection.py
@@ -13,6 +13,7 @@
from aiohttp import ClientSession, WSCloseCode, WSMessage, WSMsgType
from aiohttp.client import ClientWebSocketResponse
from aiohttp.web import WebSocketResponse
+from packaging.version import Version
from typing_extensions import Protocol, final
from chia.cmds.init_funcs import chia_full_version_str
@@ -70,8 +71,7 @@ class WSChiaConnection:
local_port: int
local_capabilities_for_handshake: List[Tuple[uint16, str]] = field(repr=False)
local_capabilities: List[Capability]
- peer_host: str
- peer_port: uint16
+ peer_info: PeerInfo
peer_node_id: bytes32
log: logging.Logger = field(repr=False)
@@ -111,8 +111,8 @@ class WSChiaConnection:
request_nonce: uint16 = uint16(0)
peer_capabilities: List[Capability] = field(default_factory=list)
# Used by the Chia Seeder.
- version: str = field(default_factory=str)
- protocol_version: str = field(default_factory=str)
+ version: Version = field(default_factory=lambda: Version("0"))
+ protocol_version: Version = field(default_factory=lambda: Version("0"))
log_rate_limit_last_time: Dict[ProtocolMessageTypes, float] = field(
default_factory=create_default_last_message_time_dict,
@@ -129,7 +129,6 @@ def create(
log: logging.Logger,
is_outbound: bool,
received_message_callback: Optional[ConnectionCallback],
- peer_host: str,
close_callback: Optional[ConnectionClosedCallbackProtocol],
peer_id: bytes32,
inbound_rate_limit_percent: int,
@@ -141,7 +140,7 @@ def create(
peername = ws._writer.transport.get_extra_info("peername")
if peername is None:
- raise ValueError(f"Was not able to get peername from {peer_host}")
+ raise ValueError(f"Was not able to get peername for {peer_id}")
if is_outbound:
request_nonce = uint16(0)
@@ -157,8 +156,7 @@ def create(
local_port=server_port,
local_capabilities_for_handshake=local_capabilities_for_handshake,
local_capabilities=known_active_capabilities(local_capabilities_for_handshake),
- peer_host=peer_host,
- peer_port=peername[1],
+ peer_info=PeerInfo(peername[0], peername[1]),
peer_node_id=peer_id,
log=log,
close_callback=close_callback,
@@ -221,8 +219,8 @@ async def perform_handshake(
if inbound_handshake.network_id != network_id:
raise ProtocolError(Err.INCOMPATIBLE_NETWORK_ID)
- self.version = inbound_handshake.software_version
- self.protocol_version = inbound_handshake.protocol_version
+ self.version = Version(inbound_handshake.software_version)
+ self.protocol_version = Version(inbound_handshake.protocol_version)
self.peer_server_port = inbound_handshake.server_port
self.connection_type = NodeType(inbound_handshake.node_type)
# "1" means capability is enabled
@@ -271,7 +269,7 @@ async def close(
if self.closed:
# always try to call the callback even for closed connections
with log_exceptions(self.log, consume=True):
- self.log.debug(f"Closing already closed connection for {self.peer_host}")
+ self.log.debug(f"Closing already closed connection for {self.peer_info.host}")
if self.close_callback is not None:
self.close_callback(self, ban_time, closed_connection=True)
self._close_event.set()
@@ -312,7 +310,7 @@ async def wait_until_closed(self) -> None:
async def ban_peer_bad_protocol(self, log_err_msg: str) -> None:
"""Ban peer for protocol violation"""
ban_seconds = INTERNAL_PROTOCOL_ERROR_BAN_SECONDS
- self.log.error(f"Banning peer for {ban_seconds} seconds: {self.peer_host} {log_err_msg}")
+ self.log.error(f"Banning peer for {ban_seconds} seconds: {self.peer_info.host} {log_err_msg}")
await self.close(ban_seconds, WSCloseCode.PROTOCOL_ERROR, Err.INVALID_PROTOCOL_MESSAGE)
def cancel_pending_requests(self) -> None:
@@ -345,10 +343,10 @@ async def outbound_handler(self) -> None:
expected = True
if expected:
- self.log.warning(f"{e} {self.peer_host}")
+ self.log.warning(f"{e} {self.peer_info.host}")
else:
error_stack = traceback.format_exc()
- self.log.error(f"Exception: {e} with {self.peer_host}")
+ self.log.error(f"Exception: {e} with {self.peer_info.host}")
self.log.error(f"Exception Stack: {error_stack}")
async def _api_call(self, full_message: Message, task_id: bytes32) -> None:
@@ -358,7 +356,7 @@ async def _api_call(self, full_message: Message, task_id: bytes32) -> None:
if self.received_message_callback is not None:
await self.received_message_callback(self)
self.log.debug(
- f"<- {ProtocolMessageTypes(full_message.type).name} from peer {self.peer_node_id} {self.peer_host}"
+ f"<- {ProtocolMessageTypes(full_message.type).name} from peer {self.peer_node_id} {self.peer_info.host}"
)
message_type = ProtocolMessageTypes(full_message.type).name
@@ -541,7 +539,9 @@ async def send_request(self, message_no_id: Message, timeout: int) -> Optional[M
if message.id in self.request_results:
result = self.request_results[message.id]
assert result is not None
- self.log.debug(f"<- {ProtocolMessageTypes(result.type).name} from: {self.peer_host}:{self.peer_port}")
+ self.log.debug(
+ f"<- {ProtocolMessageTypes(result.type).name} from: {self.peer_info.host}:{self.peer_info.port}"
+ )
self.request_results.pop(message.id)
return result
@@ -567,13 +567,13 @@ async def _send_message(self, message: Message) -> None:
if not self.outbound_rate_limiter.process_msg_and_check(
message, self.local_capabilities, self.peer_capabilities
):
- if not is_localhost(self.peer_host):
+ if not is_localhost(self.peer_info.host):
message_type = ProtocolMessageTypes(message.type)
last_time = self.log_rate_limit_last_time[message_type]
now = time.monotonic()
self.log_rate_limit_last_time[message_type] = now
if now - last_time >= 60:
- msg = f"Rate limiting ourselves. message type: {message_type.name}, peer: {self.peer_host}"
+ msg = f"Rate limiting ourselves. message type: {message_type.name}, peer: {self.peer_info.host}"
self.log.debug(msg)
# TODO: fix this special case. This function has rate limits which are too low.
@@ -584,11 +584,13 @@ async def _send_message(self, message: Message) -> None:
else:
self.log.debug(
f"Not rate limiting ourselves. message type: {ProtocolMessageTypes(message.type).name}, "
- f"peer: {self.peer_host}"
+ f"peer: {self.peer_info.host}"
)
await self.ws.send_bytes(encoded)
- self.log.debug(f"-> {ProtocolMessageTypes(message.type).name} to peer {self.peer_host} {self.peer_node_id}")
+ self.log.debug(
+ f"-> {ProtocolMessageTypes(message.type).name} to peer {self.peer_info.host} {self.peer_node_id}"
+ )
self.bytes_written += size
async def _read_one_message(self) -> Optional[Message]:
@@ -608,17 +610,17 @@ async def _read_one_message(self) -> Optional[Message]:
connection_type_str = ""
if message.type == WSMsgType.CLOSING:
self.log.debug(
- f"Closing connection to {connection_type_str} {self.peer_host}:"
+ f"Closing connection to {connection_type_str} {self.peer_info.host}:"
f"{self.peer_server_port}/"
- f"{self.peer_port}"
+ f"{self.peer_info.port}"
)
asyncio.create_task(self.close())
await asyncio.sleep(3)
elif message.type == WSMsgType.CLOSE:
self.log.debug(
- f"Peer closed connection {connection_type_str} {self.peer_host}:"
+ f"Peer closed connection {connection_type_str} {self.peer_info.host}:"
f"{self.peer_server_port}/"
- f"{self.peer_port}"
+ f"{self.peer_info.port}"
)
asyncio.create_task(self.close())
await asyncio.sleep(3)
@@ -639,9 +641,9 @@ async def _read_one_message(self) -> Optional[Message]:
if not self.inbound_rate_limiter.process_msg_and_check(
full_message_loaded, self.local_capabilities, self.peer_capabilities
):
- if self.local_type == NodeType.FULL_NODE and not is_localhost(self.peer_host):
+ if self.local_type == NodeType.FULL_NODE and not is_localhost(self.peer_info.host):
self.log.error(
- f"Peer has been rate limited and will be disconnected: {self.peer_host}, "
+ f"Peer has been rate limited and will be disconnected: {self.peer_info.host}, "
f"message: {message_type}"
)
# Only full node disconnects peers, to prevent abuse and crashing timelords, farmers, etc
@@ -650,8 +652,8 @@ async def _read_one_message(self) -> Optional[Message]:
return None
else:
self.log.debug(
- f"Peer surpassed rate limit {self.peer_host}, message: {message_type}, "
- f"port {self.peer_port} but not disconnecting"
+ f"Peer surpassed rate limit {self.peer_info.host}, message: {message_type}, "
+ f"port {self.peer_info.port} but not disconnecting"
)
return full_message_loaded
return full_message_loaded
@@ -671,7 +673,7 @@ async def _read_one_message(self) -> Optional[Message]:
# Used by the Chia Seeder.
def get_version(self) -> str:
- return self.version
+ return str(self.version)
def get_tls_version(self) -> str:
ssl_obj = self._get_extra_info("ssl_object")
@@ -685,15 +687,15 @@ def get_peer_info(self) -> Optional[PeerInfo]:
if result is None:
return None
connection_host = result[0]
- port = self.peer_server_port if self.peer_server_port is not None else self.peer_port
+ port = self.peer_server_port if self.peer_server_port is not None else self.peer_info.port
return PeerInfo(connection_host, port)
def get_peer_logging(self) -> PeerInfo:
info: Optional[PeerInfo] = self.get_peer_info()
if info is None:
- # in this case, we will use self.peer_host which is friendlier for logging
- port = self.peer_server_port if self.peer_server_port is not None else self.peer_port
- return PeerInfo(self.peer_host, port)
+ # in this case, we will use self.peer_info.host which is friendlier for logging
+ port = self.peer_server_port if self.peer_server_port is not None else self.peer_info.port
+ return PeerInfo(self.peer_info.host, port)
else:
return info
diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py
--- a/chia/simulator/block_tools.py
+++ b/chia/simulator/block_tools.py
@@ -11,7 +11,7 @@
import sys
import tempfile
import time
-from dataclasses import replace
+from dataclasses import dataclass, replace
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
@@ -45,7 +45,6 @@
detect_potential_template_generator,
simple_solution_generator,
)
-from chia.full_node.generator import setup_generator_args
from chia.full_node.signage_point import SignagePoint
from chia.plotters.chiapos import Params
from chia.plotting.create_plots import PlotKeys, create_plots
@@ -121,9 +120,7 @@
master_sk_to_pool_sk,
master_sk_to_wallet_sk,
)
-from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
-
-GENERATOR_MOD = get_generator()
+from chia.wallet.puzzles.rom_bootstrap_generator import GENERATOR_MOD
test_constants = DEFAULT_CONSTANTS.replace(
**{
@@ -358,26 +355,34 @@ async def setup_plots(
num_non_keychain_plots: int = 3,
plot_size: int = 20,
bitfield: bool = True,
- ):
+ ) -> bool:
self.add_plot_directory(self.plot_dir)
assert self.created_plots == 0
+ existing_plots: bool = True
# OG Plots
for i in range(num_og_plots):
- await self.new_plot(plot_size=plot_size, bitfield=bitfield)
+ plot = await self.new_plot(plot_size=plot_size, bitfield=bitfield)
+ if plot.new_plot:
+ existing_plots = False
# Pool Plots
for i in range(num_pool_plots):
- await self.new_plot(self.pool_ph, plot_size=plot_size, bitfield=bitfield)
+ plot = await self.new_plot(self.pool_ph, plot_size=plot_size, bitfield=bitfield)
+ if plot.new_plot:
+ existing_plots = False
# Some plots with keys that are not in the keychain
for i in range(num_non_keychain_plots):
- await self.new_plot(
+ plot = await self.new_plot(
path=self.plot_dir / "not_in_keychain",
plot_keys=PlotKeys(G1Element(), G1Element(), None),
exclude_plots=True,
plot_size=plot_size,
bitfield=bitfield,
)
+ if plot.new_plot:
+ existing_plots = False
await self.refresh_plots()
assert len(self.plot_manager.plots) == len(self.expected_plots)
+ return existing_plots
async def new_plot(
self,
@@ -388,7 +393,7 @@ async def new_plot(
exclude_plots: bool = False,
plot_size: int = 20,
bitfield: bool = True,
- ) -> Optional[bytes32]:
+ ) -> BlockToolsNewPlotResult:
final_dir = self.plot_dir
if path is not None:
final_dir = path
@@ -432,6 +437,7 @@ async def new_plot(
plot_id_new: Optional[bytes32] = None
path_new: Optional[Path] = None
+ new_plot: bool = True
if len(created):
assert len(existed) == 0
@@ -440,13 +446,14 @@ async def new_plot(
if len(existed):
assert len(created) == 0
plot_id_new, path_new = list(existed.items())[0]
+ new_plot = False
assert plot_id_new is not None
assert path_new is not None
if not exclude_plots:
self.expected_plots[plot_id_new] = path_new
- return plot_id_new
+ return BlockToolsNewPlotResult(plot_id_new, new_plot)
except KeyboardInterrupt:
shutil.rmtree(self.temp_dir, ignore_errors=True)
@@ -1718,8 +1725,8 @@ def get_full_block_and_block_record(
def compute_cost_test(generator: BlockGenerator, cost_per_byte: int) -> Tuple[Optional[uint16], uint64]:
try:
- block_program, block_program_args = setup_generator_args(generator)
- clvm_cost, result = GENERATOR_MOD.run_mempool_with_cost(INFINITE_COST, block_program, block_program_args)
+ block_program_args = Program.to([[bytes(g) for g in generator.generator_refs]])
+ clvm_cost, result = GENERATOR_MOD.run_mempool_with_cost(INFINITE_COST, generator.program, block_program_args)
size_cost = len(bytes(generator.program)) * cost_per_byte
condition_cost = 0
@@ -2121,6 +2128,12 @@ def create_test_unfinished_block(
)
+@dataclass
+class BlockToolsNewPlotResult:
+ plot_id: bytes32
+ new_plot: bool
+
+
# Remove these counters when `create_block_tools` and `create_block_tools_async` are removed
create_block_tools_async_count = 0
create_block_tools_count = 0
diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py
--- a/chia/simulator/full_node_simulator.py
+++ b/chia/simulator/full_node_simulator.py
@@ -24,8 +24,8 @@
from chia.types.spend_bundle import SpendBundle
from chia.util.config import lock_and_load_config, save_config
from chia.util.ints import uint8, uint32, uint64, uint128
+from chia.wallet.payment import Payment
from chia.wallet.transaction_record import TransactionRecord
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_node import WalletNode
from chia.wallet.wallet_state_manager import WalletStateManager
@@ -598,10 +598,10 @@ async def create_coins_with_amounts(
if len(amounts) == 0:
return set()
- outputs: List[AmountWithPuzzlehash] = []
+ outputs: List[Payment] = []
for amount in amounts:
puzzle_hash = await wallet.get_new_puzzlehash()
- outputs.append({"puzzlehash": puzzle_hash, "amount": uint64(amount), "memos": []})
+ outputs.append(Payment(puzzle_hash, amount))
transaction_records: List[TransactionRecord] = []
outputs_iterator = iter(outputs)
@@ -613,8 +613,8 @@ async def create_coins_with_amounts(
if len(outputs_group) > 0:
async with wallet.wallet_state_manager.lock:
tx = await wallet.generate_signed_transaction(
- amount=outputs_group[0]["amount"],
- puzzle_hash=outputs_group[0]["puzzlehash"],
+ amount=outputs_group[0].amount,
+ puzzle_hash=outputs_group[0].puzzle_hash,
primaries=outputs_group[1:],
)
await wallet.push_transaction(tx=tx)
@@ -625,7 +625,7 @@ async def create_coins_with_amounts(
await self.process_transaction_records(records=transaction_records, timeout=None)
output_coins = {coin for transaction_record in transaction_records for coin in transaction_record.additions}
- puzzle_hashes = {output["puzzlehash"] for output in outputs}
+ puzzle_hashes = {output.puzzle_hash for output in outputs}
change_coins = {coin for coin in output_coins if coin.puzzle_hash not in puzzle_hashes}
coins_to_receive = output_coins - change_coins
await wait_for_coins_in_wallet(coins=coins_to_receive, wallet=wallet)
diff --git a/chia/simulator/setup_nodes.py b/chia/simulator/setup_nodes.py
--- a/chia/simulator/setup_nodes.py
+++ b/chia/simulator/setup_nodes.py
@@ -32,7 +32,7 @@
from chia.simulator.time_out_assert import time_out_assert_custom_interval
from chia.timelord.timelord import Timelord
from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.hash import std_hash
from chia.util.ints import uint16, uint32
from chia.util.keychain import Keychain
@@ -302,7 +302,7 @@ async def setup_farmer_multi_harvester(
]
farmer_service = await farmer_node_iterators[0].__anext__()
if start_services:
- farmer_peer = PeerInfo(block_tools.config["self_hostname"], uint16(farmer_service._server._port))
+ farmer_peer = UnresolvedPeerInfo(block_tools.config["self_hostname"], uint16(farmer_service._server._port))
else:
farmer_peer = None
harvester_node_iterators = []
@@ -432,7 +432,7 @@ async def setup_full_system_inner(
harvester_iter = setup_harvester(
shared_b_tools,
shared_b_tools.root_path / "harvester",
- PeerInfo(shared_b_tools.config["self_hostname"], farmer_service._server.get_port()),
+ UnresolvedPeerInfo(shared_b_tools.config["self_hostname"], farmer_service._server.get_port()),
consensus_constants,
)
vdf1_port = uint16(find_available_listen_port("vdf1"))
diff --git a/chia/simulator/setup_services.py b/chia/simulator/setup_services.py
--- a/chia/simulator/setup_services.py
+++ b/chia/simulator/setup_services.py
@@ -30,7 +30,7 @@
from chia.simulator.start_simulator import create_full_node_simulator_service
from chia.timelord.timelord import Timelord
from chia.timelord.timelord_launcher import kill_processes, spawn_process
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.bech32m import encode_puzzle_hash
from chia.util.config import config_path_for_filename, lock_and_load_config, save_config
from chia.util.ints import uint16
@@ -242,7 +242,7 @@ async def setup_wallet_node(
async def setup_harvester(
b_tools: BlockTools,
root_path: Path,
- farmer_peer: Optional[PeerInfo],
+ farmer_peer: Optional[UnresolvedPeerInfo],
consensus_constants: ConsensusConstants,
start_service: bool = True,
) -> AsyncGenerator[Service[Harvester], None]:
diff --git a/chia/simulator/start_simulator.py b/chia/simulator/start_simulator.py
--- a/chia/simulator/start_simulator.py
+++ b/chia/simulator/start_simulator.py
@@ -54,7 +54,6 @@ def create_full_node_simulator_service(
node_type=NodeType.FULL_NODE,
advertised_port=service_config["port"],
service_name=SERVICE_NAME,
- server_listen_ports=[service_config["port"]],
on_connect_callback=node.on_connect,
network_id=network_id,
rpc_info=(SimulatorFullNodeRpcApi, service_config["rpc_port"]),
diff --git a/chia/simulator/wallet_tools.py b/chia/simulator/wallet_tools.py
--- a/chia/simulator/wallet_tools.py
+++ b/chia/simulator/wallet_tools.py
@@ -15,7 +15,7 @@
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
from chia.types.spend_bundle import SpendBundle
-from chia.util.condition_tools import conditions_by_opcode, conditions_for_solution
+from chia.util.condition_tools import conditions_dict_for_solution
from chia.util.hash import std_hash
from chia.util.ints import uint32, uint64
from chia.wallet.derive_keys import master_sk_to_wallet_sk
@@ -180,12 +180,9 @@ def sign_transaction(self, coin_spends: List[CoinSpend]) -> SpendBundle:
for coin_spend in coin_spends: # noqa
secret_key = self.get_private_key_for_puzzle_hash(coin_spend.coin.puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(secret_key, DEFAULT_HIDDEN_PUZZLE_HASH)
- err, con, cost = conditions_for_solution(
+ conditions_dict = conditions_dict_for_solution(
coin_spend.puzzle_reveal, coin_spend.solution, self.constants.MAX_BLOCK_COST_CLVM
)
- if not con:
- raise ValueError(err)
- conditions_dict = conditions_by_opcode(con)
for cwa in conditions_dict.get(ConditionOpcode.AGG_SIG_UNSAFE, []):
msg = cwa.vars[1]
diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py
--- a/chia/timelord/timelord.py
+++ b/chia/timelord/timelord.py
@@ -94,8 +94,6 @@ def __init__(self, root_path, config: Dict, constants: ConsensusConstants):
self.allows_iters: List[Chain] = []
# Last peak received, None if it's already processed.
self.new_peak: Optional[timelord_protocol.NewPeakTimelord] = None
- # Last end of subslot bundle, None if we built a peak on top of it.
- self.new_subslot_end: Optional[EndOfSubSlotBundle] = None
# Last state received. Can either be a new peak or a new EndOfSubslotBundle.
# Unfinished block info, iters adjusted to the last peak.
self.unfinished_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = []
@@ -376,14 +374,6 @@ async def _handle_new_peak(self):
self.new_peak = None
await self._reset_chains()
- async def _handle_subslot_end(self):
- self.last_state.set_state(self.new_subslot_end)
- for block in self.unfinished_blocks:
- if self._can_infuse_unfinished_block(block) is not None:
- self.total_unfinished += 1
- self.new_subslot_end = None
- await self._reset_chains()
-
async def _map_chains_with_vdf_clients(self):
while not self._shut_down:
picked_chain = None
@@ -826,9 +816,12 @@ async def _check_for_end_of_subslot(self, iter_to_look_for: uint64):
# No overflow blocks in a new epoch
self.unfinished_blocks = []
self.overflow_blocks = []
- self.new_subslot_end = eos_bundle
- await self._handle_subslot_end()
+ self.last_state.set_state(eos_bundle)
+ for block in self.unfinished_blocks:
+ if self._can_infuse_unfinished_block(block) is not None:
+ self.total_unfinished += 1
+ await self._reset_chains()
async def _handle_failures(self):
if len(self.vdf_failures) > 0:
diff --git a/chia/timelord/timelord_api.py b/chia/timelord/timelord_api.py
--- a/chia/timelord/timelord_api.py
+++ b/chia/timelord/timelord_api.py
@@ -43,12 +43,10 @@ async def new_peak_timelord(self, new_peak: timelord_protocol.NewPeakTimelord) -
):
log.info("Skipping peak, already have.")
self.timelord.state_changed("skipping_peak", {"height": new_peak.reward_chain_block.height})
- return None
else:
log.warning("block that we don't have, changing to it.")
self.timelord.new_peak = new_peak
self.timelord.state_changed("new_peak", {"height": new_peak.reward_chain_block.height})
- self.timelord.new_subslot_end = None
@api_request()
async def new_unfinished_block_timelord(self, new_unfinished_block: timelord_protocol.NewUnfinishedBlockTimelord):
diff --git a/chia/timelord/timelord_launcher.py b/chia/timelord/timelord_launcher.py
--- a/chia/timelord/timelord_launcher.py
+++ b/chia/timelord/timelord_launcher.py
@@ -13,7 +13,7 @@
from chia.util.chia_logging import initialize_logging
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH
-from chia.util.network import get_host_addr
+from chia.util.network import resolve
from chia.util.setproctitle import setproctitle
active_processes: List = []
@@ -51,7 +51,7 @@ async def spawn_process(host: str, port: int, counter: int, lock: asyncio.Lock,
try:
dirname = path_to_vdf_client.parent
basename = path_to_vdf_client.name
- resolved = get_host_addr(host, prefer_ipv6=prefer_ipv6)
+ resolved = await resolve(host, prefer_ipv6=prefer_ipv6)
proc = await asyncio.create_subprocess_shell(
f"{basename} {resolved} {port} {counter}",
stdout=asyncio.subprocess.PIPE,
diff --git a/chia/types/coin_spend.py b/chia/types/coin_spend.py
--- a/chia/types/coin_spend.py
+++ b/chia/types/coin_spend.py
@@ -65,3 +65,10 @@ def compute_additions_with_cost(
def compute_additions(cs: CoinSpend, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) -> List[Coin]:
return compute_additions_with_cost(cs, max_cost=max_cost)[0]
+
+
+@streamable
+@dataclass(frozen=True)
+class SpendInfo(Streamable):
+ puzzle: SerializedProgram
+ solution: SerializedProgram
diff --git a/chia/types/condition_opcodes.py b/chia/types/condition_opcodes.py
--- a/chia/types/condition_opcodes.py
+++ b/chia/types/condition_opcodes.py
@@ -4,7 +4,7 @@
from typing import Any
-# See chia/wallet/puzzles/condition_codes.clvm
+# See chia/wallet/puzzles/condition_codes.clib
class ConditionOpcode(bytes, enum.Enum):
# AGG_SIG is ascii "1"
diff --git a/chia/types/eligible_coin_spends.py b/chia/types/eligible_coin_spends.py
new file mode 100644
--- /dev/null
+++ b/chia/types/eligible_coin_spends.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+import dataclasses
+from typing import Dict, List, Optional, Tuple
+
+from chia.consensus.condition_costs import ConditionCost
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.serialized_program import SerializedProgram
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.mempool_item import BundleCoinSpend
+from chia.util.ints import uint64
+
+
+def run_for_cost(
+ puzzle_reveal: SerializedProgram, solution: SerializedProgram, additions_count: int, max_cost: int
+) -> uint64:
+ create_coins_cost = additions_count * ConditionCost.CREATE_COIN.value
+ clvm_cost, _ = puzzle_reveal.run_mempool_with_cost(max_cost, solution)
+ saved_cost = uint64(clvm_cost + create_coins_cost)
+ return saved_cost
+
+
+@dataclasses.dataclass(frozen=True)
+class DedupCoinSpend:
+ solution: SerializedProgram
+ cost: Optional[uint64]
+
+
+@dataclasses.dataclass(frozen=True)
+class EligibleCoinSpends:
+ eligible_spends: Dict[bytes32, DedupCoinSpend] = dataclasses.field(default_factory=dict)
+
+ def get_deduplication_info(
+ self, *, bundle_coin_spends: Dict[bytes32, BundleCoinSpend], max_cost: int
+ ) -> Tuple[List[CoinSpend], uint64, List[Coin]]:
+ """
+ Checks all coin spends of a mempool item for deduplication eligibility and
+ provides the caller with the necessary information that allows it to perform
+ identical spend aggregation on that mempool item if possible
+
+ Args:
+ bundle_coin_spends: the mempool item's coin spends data
+ max_cost: the maximum limit when running for cost
+
+ Returns:
+ List[CoinSpend]: list of unique coin spends in this mempool item
+ uint64: the cost we're saving by deduplicating eligible coins
+ List[Coin]: list of unique additions in this mempool item
+
+ Raises:
+ ValueError to skip the mempool item we're currently in, if it's
+ attempting to spend an eligible coin with a different solution than the
+ one we're already deduplicating on.
+ """
+ cost_saving = 0
+ unique_coin_spends: List[CoinSpend] = []
+ unique_additions: List[Coin] = []
+ new_eligible_spends: Dict[bytes32, DedupCoinSpend] = {}
+ # See if this item has coin spends that are eligible for deduplication
+ for coin_id, spend_data in bundle_coin_spends.items():
+ if not spend_data.eligible_for_dedup:
+ unique_coin_spends.append(spend_data.coin_spend)
+ unique_additions.extend(spend_data.additions)
+ continue
+ # See if we processed an item with this coin before
+ dedup_coin_spend = self.eligible_spends.get(coin_id)
+ if dedup_coin_spend is None:
+ # We didn't process an item with this coin before. If we end up including
+ # this item, add this pair to eligible_spends
+ new_eligible_spends[coin_id] = DedupCoinSpend(spend_data.coin_spend.solution, None)
+ unique_coin_spends.append(spend_data.coin_spend)
+ unique_additions.extend(spend_data.additions)
+ continue
+ # See if the solution was identical
+ current_solution, duplicate_cost = dataclasses.astuple(dedup_coin_spend)
+ if current_solution != spend_data.coin_spend.solution:
+ # It wasn't, so let's skip this whole item because it's relying on
+ # spending this coin with a different solution and that would
+ # conflict with the coin spends that we're deduplicating already
+ # NOTE: We can miss an opportunity to deduplicate on other solutions
+ # even if they end up saving more cost, as we're going for the first
+ # solution we see from the relatively highest FPC item, to avoid
+ # severe performance and/or time-complexity impact
+ raise ValueError("Solution is different from what we're deduplicating on")
+ # Let's calculate the saved cost if we never did that before
+ if duplicate_cost is None:
+ # See first if this mempool item had this cost computed before
+ # This can happen if this item didn't get included in the previous block
+ spend_cost = spend_data.cost
+ if spend_cost is None:
+ spend_cost = run_for_cost(
+ puzzle_reveal=spend_data.coin_spend.puzzle_reveal,
+ solution=spend_data.coin_spend.solution,
+ additions_count=len(spend_data.additions),
+ max_cost=max_cost,
+ )
+ # Update this mempool item's coin spends map
+ bundle_coin_spends[coin_id] = BundleCoinSpend(
+ spend_data.coin_spend, spend_data.eligible_for_dedup, spend_data.additions, spend_cost
+ )
+ duplicate_cost = spend_cost
+ # If we end up including this item, update this entry's cost
+ new_eligible_spends[coin_id] = DedupCoinSpend(current_solution, duplicate_cost)
+ cost_saving += duplicate_cost
+ # Update the eligible coin spends data
+ self.eligible_spends.update(new_eligible_spends)
+ return unique_coin_spends, uint64(cost_saving), unique_additions
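Illustrative usage sketch (not part of the patch): how a caller such as block creation might drive EligibleCoinSpends across several mempool items. It assumes each `item` exposes the `bundle_coin_spends` mapping built by the mempool, as described in the docstring above.

from chia.types.eligible_coin_spends import EligibleCoinSpends

def deduplicated_items(items, max_cost: int):
    eligible_coin_spends = EligibleCoinSpends()
    for item in items:
        try:
            unique_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
                bundle_coin_spends=item.bundle_coin_spends, max_cost=max_cost
            )
        except ValueError:
            # The item spends an eligible coin with a different solution than the
            # one already being deduplicated on, so skip it.
            continue
        yield item, unique_spends, cost_saving, unique_additions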
diff --git a/chia/types/internal_mempool_item.py b/chia/types/internal_mempool_item.py
new file mode 100644
--- /dev/null
+++ b/chia/types/internal_mempool_item.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Dict
+
+from chia.consensus.cost_calculator import NPCResult
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.mempool_item import BundleCoinSpend
+from chia.types.spend_bundle import SpendBundle
+from chia.util.ints import uint32
+
+
+@dataclass(frozen=True)
+class InternalMempoolItem:
+ spend_bundle: SpendBundle
+ npc_result: NPCResult
+ height_added_to_mempool: uint32
+ # Map of coin ID to coin spend data between the bundle and its NPCResult
+ bundle_coin_spends: Dict[bytes32, BundleCoinSpend]
diff --git a/chia/types/mempool_item.py b/chia/types/mempool_item.py
--- a/chia/types/mempool_item.py
+++ b/chia/types/mempool_item.py
@@ -1,17 +1,27 @@
from __future__ import annotations
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional
from chia.consensus.cost_calculator import NPCResult
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
from chia.types.spend_bundle import SpendBundle
from chia.util.generator_tools import additions_for_npc
from chia.util.ints import uint32, uint64
from chia.util.streamable import recurse_jsonify
+@dataclass(frozen=True)
+class BundleCoinSpend:
+ coin_spend: CoinSpend
+ eligible_for_dedup: bool
+ additions: List[Coin]
+ # cost on the specific solution in this item
+ cost: Optional[uint64] = None
+
+
@dataclass(frozen=True)
class MempoolItem:
spend_bundle: SpendBundle
@@ -23,11 +33,14 @@ class MempoolItem:
# If present, this SpendBundle is not valid at or before this height
assert_height: Optional[uint32] = None
- # If presemt, this SpendBundle is not valid once the block height reaches
+ # If present, this SpendBundle is not valid once the block height reaches
# the specified height
assert_before_height: Optional[uint32] = None
assert_before_seconds: Optional[uint64] = None
+ # Map of coin ID to coin spend data between the bundle and its NPCResult
+ bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = field(default_factory=dict)
+
def __lt__(self, other: MempoolItem) -> bool:
return self.fee_per_cost < other.fee_per_cost
@@ -44,8 +57,7 @@ def name(self) -> bytes32:
@property
def cost(self) -> uint64:
- assert self.npc_result.conds is not None
- return uint64(self.npc_result.conds.cost)
+ return self.npc_result.cost
@property
def additions(self) -> List[Coin]:
diff --git a/chia/types/peer_info.py b/chia/types/peer_info.py
--- a/chia/types/peer_info.py
+++ b/chia/types/peer_info.py
@@ -9,6 +9,12 @@
from chia.util.streamable import Streamable, streamable
+@dataclass(frozen=True)
+class UnresolvedPeerInfo:
+ host: str
+ port: uint16
+
+
# TODO, Replace unsafe_hash with frozen and drop the __init__ as soon as all PeerInfo call sites pass in an IPAddress.
@dataclass(unsafe_hash=True)
class PeerInfo:
diff --git a/chia/util/condition_tools.py b/chia/util/condition_tools.py
--- a/chia/util/condition_tools.py
+++ b/chia/util/condition_tools.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Tuple
from clvm.casts import int_from_bytes
@@ -18,19 +18,17 @@
# since asserts can be stripped with python `-OO` flag
-def parse_sexp_to_condition(
- sexp: Program,
-) -> Tuple[Optional[Err], Optional[ConditionWithArgs]]:
+def parse_sexp_to_condition(sexp: Program) -> ConditionWithArgs:
"""
Takes a ChiaLisp sexp and returns a ConditionWithArgs.
- If it fails, returns an Error
+    Raises a ConsensusError if it fails.
"""
first = sexp.pair
if first is None:
- return Err.INVALID_CONDITION, None
+ raise ConsensusError(Err.INVALID_CONDITION, ["first is None"])
op = first[0].atom
if op is None or len(op) != 1:
- return Err.INVALID_CONDITION, None
+ raise ConsensusError(Err.INVALID_CONDITION, ["invalid op"])
# since the ConditionWithArgs only has atoms as the args, we can't parse
# hints and memos with this function. We just exit the loop if we encounter
@@ -46,52 +44,24 @@ def parse_sexp_to_condition(
if len(vars) > 3:
break
- return None, ConditionWithArgs(ConditionOpcode(op), vars)
+ return ConditionWithArgs(ConditionOpcode(op), vars)
-def parse_sexp_to_conditions(
- sexp: Program,
-) -> Tuple[Optional[Err], Optional[List[ConditionWithArgs]]]:
+def parse_sexp_to_conditions(sexp: Program) -> List[ConditionWithArgs]:
"""
Takes a ChiaLisp sexp (list) and returns the list of ConditionWithArgss
- If it fails, returns as Error
+    Raises a ConsensusError if it fails.
"""
- results: List[ConditionWithArgs] = []
- try:
- for _ in sexp.as_iter():
- error, cvp = parse_sexp_to_condition(_)
- if error:
- return error, None
- results.append(cvp) # type: ignore # noqa
- except ConsensusError:
- return Err.INVALID_CONDITION, None
- return None, results
-
-
-def conditions_by_opcode(
- conditions: List[ConditionWithArgs],
-) -> Dict[ConditionOpcode, List[ConditionWithArgs]]:
- """
- Takes a list of ConditionWithArgss(CVP) and return dictionary of CVPs keyed of their opcode
- """
- d: Dict[ConditionOpcode, List[ConditionWithArgs]] = {}
- cvp: ConditionWithArgs
- for cvp in conditions:
- if cvp.opcode not in d:
- d[cvp.opcode] = list()
- d[cvp.opcode].append(cvp)
- return d
-
-
-def pkm_pairs(
- conditions: SpendBundleConditions, additional_data: bytes, *, soft_fork: bool
-) -> Tuple[List[bytes48], List[bytes]]:
+ return [parse_sexp_to_condition(s) for s in sexp.as_iter()]
+
+
+def pkm_pairs(conditions: SpendBundleConditions, additional_data: bytes) -> Tuple[List[bytes48], List[bytes]]:
ret: Tuple[List[bytes48], List[bytes]] = ([], [])
for pk, msg in conditions.agg_sig_unsafe:
ret[0].append(bytes48(pk))
ret[1].append(msg)
- if soft_fork and msg.endswith(additional_data):
+ if msg.endswith(additional_data):
raise ConsensusError(Err.INVALID_CONDITION)
for spend in conditions.spends:
@@ -140,22 +110,21 @@ def conditions_dict_for_solution(
puzzle_reveal: SerializedProgram,
solution: SerializedProgram,
max_cost: int,
-) -> Tuple[Optional[Err], Optional[Dict[ConditionOpcode, List[ConditionWithArgs]]], uint64]:
- error, result, cost = conditions_for_solution(puzzle_reveal, solution, max_cost)
- if error or result is None:
- return error, None, uint64(0)
- return None, conditions_by_opcode(result), cost
+) -> Dict[ConditionOpcode, List[ConditionWithArgs]]:
+ conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {}
+ for cvp in conditions_for_solution(puzzle_reveal, solution, max_cost):
+ conditions_dict.setdefault(cvp.opcode, list()).append(cvp)
+ return conditions_dict
def conditions_for_solution(
puzzle_reveal: SerializedProgram,
solution: SerializedProgram,
max_cost: int,
-) -> Tuple[Optional[Err], Optional[List[ConditionWithArgs]], uint64]:
+) -> List[ConditionWithArgs]:
# get the standard script for a puzzle hash and feed in the solution
try:
cost, r = puzzle_reveal.run_with_cost(max_cost, solution)
- error, result = parse_sexp_to_conditions(r)
- return error, result, uint64(cost)
- except Program.EvalError:
- return Err.SEXP_ERROR, None, uint64(0)
+ return parse_sexp_to_conditions(r)
+ except Program.EvalError as e:
+ raise ConsensusError(Err.SEXP_ERROR, [str(e)]) from e
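Call-site migration sketch (illustrative, not taken from the patch): the error-tuple style is replaced by exceptions, so callers wrap the call in try/except ConsensusError instead of inspecting the first tuple element, mirroring the updates to cat_utils.py and wallet_tools.py elsewhere in this diff.

from typing import Dict, List

from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
from chia.util.condition_tools import conditions_dict_for_solution
from chia.util.errors import ConsensusError

def conditions_or_empty(
    puzzle_reveal: SerializedProgram, solution: SerializedProgram, max_cost: int
) -> Dict[ConditionOpcode, List[ConditionWithArgs]]:
    try:
        return conditions_dict_for_solution(puzzle_reveal, solution, max_cost)
    except ConsensusError:
        # SEXP_ERROR / INVALID_CONDITION now arrive as exceptions rather than as
        # the first element of a returned tuple.
        return {}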
diff --git a/chia/util/create_alert_file.py b/chia/util/create_alert_file.py
deleted file mode 100644
--- a/chia/util/create_alert_file.py
+++ /dev/null
@@ -1,117 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import List
-
-from blspy import AugSchemeMPL
-
-from chia.util.ints import uint32
-from chia.util.keychain import Keychain
-from chia.util.validate_alert import create_alert_file, create_not_ready_alert_file, validate_alert_file
-
-bitcoin_hash = None
-bram_message = None
-
-status = None
-while True:
- status_input = input("What is the status of this alert? (ready/not ready)").lower()
- if status_input == "ready":
- status = True
- break
- elif status_input == "not ready":
- status = False
- break
- else:
- print("Unknown input")
-
-keychain: Keychain = Keychain()
-print("\n___________ SELECT KEY ____________")
-
-private_keys = keychain.get_all_private_keys()
-if len(private_keys) == 0:
- print("There are no saved private keys.")
- quit()
-print("Showing all private keys:")
-for sk, seed in private_keys:
- print("\nFingerprint:", sk.get_g1().get_fingerprint())
-
-selected_key = None
-while True:
- user_input = input("\nEnter fingerprint of the key you want to use, or enter Q to quit: ").lower()
- if user_input == "q":
- quit()
- for sk, seed in private_keys:
- fingerprint = sk.get_g1().get_fingerprint()
- pub = sk.get_g1()
- if int(user_input) == fingerprint:
- print(f"Selected: {fingerprint}")
- selected_key = sk
- break
-
- if selected_key is not None:
- break
-
-print("\n___________ HD PATH ____________")
-while True:
- hd_path = input("Enter the HD path in the form 'm/12381/8444/n/n', or enter Q to quit: ").lower()
- if hd_path == "q":
- quit()
- verify = input(f"Is this correct path: {hd_path}? (y/n) ").lower()
- if verify == "y":
- break
-
-
-k = Keychain()
-private_keys = k.get_all_private_keys()
-path: List[uint32] = [uint32(int(i)) for i in hd_path.split("/") if i != "m"]
-
-# Derive HD key using path form input
-for c in path:
- selected_key = AugSchemeMPL.derive_child_sk(selected_key, c)
-print("Public key:", selected_key.get_g1())
-
-# get file path
-file_path = None
-while True:
- file_path = input("Enter the path where you want to save signed alert file, or q to quit: ")
- if file_path == "q" or file_path == "Q":
- quit()
- file_path = file_path.strip()
- y_n = input(f"Is this correct path (y/n)?: {file_path} ").lower()
- if y_n == "y":
- break
-f_path: Path = Path(file_path)
-
-if status is True:
- print("")
- print("___________ BITCOIN BLOCK HASH ____________")
- while True:
- bitcoin_hash = input("Insert Bitcoin block hash: ")
- print(f"Bitcoin block hash = {bitcoin_hash}")
- y_n = input("Does this look good (y/n): ").lower()
- if y_n == "y":
- break
-
- print("")
- print("___________ BRAM MESSAGE ____________")
- while True:
- bram_message = input("Insert message from Bram: ")
- print(f"Bram message = {bram_message}")
- y_n = input("Does this look good (y/n): ").lower()
- if y_n == "y":
- break
-
- genesis_challenge_preimage = f"bitcoin_hash:{bitcoin_hash},bram_message:{bram_message}"
-
- create_alert_file(f_path, selected_key, genesis_challenge_preimage)
- print(f"Alert written to file {f_path}")
- pubkey = f"{bytes(selected_key.get_g1()).hex()}"
- validated = validate_alert_file(f_path, pubkey)
- if validated:
- print(f"Signature has passed validation for pubkey: {pubkey}")
- else:
- print(f"Signature has failed validation for pubkey: {pubkey}")
- assert False
-else:
- create_not_ready_alert_file(f_path, selected_key)
- print(f"Alert written to file {f_path}")
diff --git a/chia/util/db_wrapper.py b/chia/util/db_wrapper.py
--- a/chia/util/db_wrapper.py
+++ b/chia/util/db_wrapper.py
@@ -50,16 +50,18 @@ async def manage_connection(
log_path: Optional[Path] = None,
name: Optional[str] = None,
) -> AsyncIterator[aiosqlite.Connection]:
- if log_path is not None:
- with log_path.open("a", encoding="utf-8") as file:
+ async with contextlib.AsyncExitStack() as exit_stack:
+ connection: aiosqlite.Connection
+ if log_path is not None:
+ file = exit_stack.enter_context(log_path.open("a", encoding="utf-8"))
connection = await _create_connection(database=database, uri=uri, log_file=file, name=name)
- else:
- connection = await _create_connection(database=database, uri=uri, name=name)
+ else:
+ connection = await _create_connection(database=database, uri=uri, name=name)
- try:
- yield connection
- finally:
- await connection.close()
+ try:
+ yield connection
+ finally:
+ await connection.close()
def sql_trace_callback(req: str, file: TextIO, name: Optional[str] = None) -> None:
diff --git a/chia/util/default_root.py b/chia/util/default_root.py
--- a/chia/util/default_root.py
+++ b/chia/util/default_root.py
@@ -6,3 +6,5 @@
DEFAULT_ROOT_PATH = Path(os.path.expanduser(os.getenv("CHIA_ROOT", "~/.chia/mainnet"))).resolve()
DEFAULT_KEYS_ROOT_PATH = Path(os.path.expanduser(os.getenv("CHIA_KEYS_ROOT", "~/.chia_keys"))).resolve()
+
+SIMULATOR_ROOT_PATH = Path(os.path.expanduser(os.getenv("CHIA_SIMULATOR_ROOT", "~/.chia/simulator"))).resolve()
diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py
--- a/chia/util/file_keyring.py
+++ b/chia/util/file_keyring.py
@@ -10,7 +10,7 @@
from hashlib import pbkdf2_hmac
from pathlib import Path
from secrets import token_bytes
-from typing import Any, Dict, Iterator, Optional, Union
+from typing import Any, Dict, Iterator, Optional, Union, cast
import yaml
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 # pyright: reportMissingModuleSource=false
@@ -386,7 +386,9 @@ def write_keyring(self, fresh_salt: bool = False) -> None:
# When writing for the first time, we should have a cached passphrase which hasn't been
# validated (because it can't be validated yet...)
if not self.has_content() and KeyringWrapper.get_shared_instance().has_cached_master_passphrase():
- passphrase = KeyringWrapper.get_shared_instance().get_cached_master_passphrase()[0]
+ # TODO: The above checks, at the time of writing, make sure we get a str here. A reconsideration of this
+ # interface would be good.
+ passphrase = cast(str, KeyringWrapper.get_shared_instance().get_cached_master_passphrase()[0])
else:
# TODO, this prompts for the passphrase interactively, move this out
passphrase = obtain_current_passphrase(use_passphrase_cache=True)
diff --git a/chia/util/files.py b/chia/util/files.py
--- a/chia/util/files.py
+++ b/chia/util/files.py
@@ -13,7 +13,7 @@
log = logging.getLogger(__name__)
-def move_file(src: Path, dst: Path):
+def move_file(src: Path, dst: Path) -> None:
"""
Attempts to move the file at src to dst, falling back to a copy if the move fails.
"""
@@ -35,7 +35,7 @@ def move_file(src: Path, dst: Path):
raise
-async def move_file_async(src: Path, dst: Path, *, reattempts: int = 6, reattempt_delay: float = 0.5):
+async def move_file_async(src: Path, dst: Path, *, reattempts: int = 6, reattempt_delay: float = 0.5) -> None:
"""
Attempts to move the file at src to dst, making multiple attempts if the move fails.
"""
@@ -60,7 +60,9 @@ async def move_file_async(src: Path, dst: Path, *, reattempts: int = 6, reattemp
log.debug(f"Moved {src} to {dst}")
-async def write_file_async(file_path: Path, data: Union[str, bytes], *, file_mode: int = 0o600, dir_mode: int = 0o700):
+async def write_file_async(
+ file_path: Path, data: Union[str, bytes], *, file_mode: int = 0o600, dir_mode: int = 0o700
+) -> None:
"""
Writes the provided data to a temporary file and then moves it to the final destination.
"""
@@ -71,6 +73,8 @@ async def write_file_async(file_path: Path, data: Union[str, bytes], *, file_mod
mode: Literal["w+", "w+b"] = "w+" if type(data) == str else "w+b"
temp_file_path: Path
async with tempfile.NamedTemporaryFile(dir=file_path.parent, mode=mode, delete=False) as f:
+ # Ignoring type error since it is not obvious how to tie the type of the data
+ # being passed in to the type of the file object, etc.
temp_file_path = f.name # type: ignore[assignment]
await f.write(data) # type: ignore[arg-type]
await f.flush()
diff --git a/chia/util/keychain.py b/chia/util/keychain.py
--- a/chia/util/keychain.py
+++ b/chia/util/keychain.py
@@ -482,10 +482,14 @@ def is_keyring_locked() -> bool:
or if a master passphrase is set and the cached passphrase is valid, the keyring is "unlocked"
"""
# Unlocked: If a master passphrase isn't set, or if the cached passphrase is valid
- if not Keychain.has_master_passphrase() or (
- Keychain.has_cached_passphrase()
- and Keychain.master_passphrase_is_valid(Keychain.get_cached_master_passphrase())
- ):
+ if not Keychain.has_master_passphrase():
+ return False
+
+ passphrase = Keychain.get_cached_master_passphrase()
+ if passphrase is None:
+ return True
+
+ if Keychain.master_passphrase_is_valid(passphrase):
return False
# Locked: Everything else
@@ -545,7 +549,7 @@ def has_cached_passphrase() -> bool:
return KeyringWrapper.get_shared_instance().has_cached_master_passphrase()
@staticmethod
- def get_cached_master_passphrase() -> str:
+ def get_cached_master_passphrase() -> Optional[str]:
"""
Returns the cached master passphrase
"""
diff --git a/chia/util/keyring_wrapper.py b/chia/util/keyring_wrapper.py
--- a/chia/util/keyring_wrapper.py
+++ b/chia/util/keyring_wrapper.py
@@ -2,11 +2,12 @@
from pathlib import Path
from sys import platform
-from typing import Optional, Tuple, Union
+from typing import Optional, Tuple, Union, overload
from keyring.backends.macOS import Keyring as MacKeyring
from keyring.backends.Windows import WinVaultKeyring as WinKeyring
from keyring.errors import KeyringError, PasswordDeleteError
+from typing_extensions import Literal
from chia.util.default_root import DEFAULT_KEYS_ROOT_PATH
from chia.util.file_keyring import FileKeyring
@@ -109,8 +110,23 @@ def set_keys_root_path(keys_root_path: Path):
"""
KeyringWrapper.__keys_root_path = keys_root_path
+ @overload
@staticmethod
- def get_shared_instance(create_if_necessary: bool = True):
+ def get_shared_instance() -> KeyringWrapper:
+ ...
+
+ @overload
+ @staticmethod
+ def get_shared_instance(create_if_necessary: Literal[True]) -> KeyringWrapper:
+ ...
+
+ @overload
+ @staticmethod
+ def get_shared_instance(create_if_necessary: bool) -> Optional[KeyringWrapper]:
+ ...
+
+ @staticmethod
+ def get_shared_instance(create_if_necessary: bool = True) -> Optional[KeyringWrapper]:
if not KeyringWrapper.__shared_instance and create_if_necessary:
KeyringWrapper.__shared_instance = KeyringWrapper(keys_root_path=KeyringWrapper.__keys_root_path)
diff --git a/chia/util/log_exceptions.py b/chia/util/log_exceptions.py
--- a/chia/util/log_exceptions.py
+++ b/chia/util/log_exceptions.py
@@ -3,13 +3,27 @@
import logging
import traceback
from contextlib import contextmanager
+from typing import Tuple, Type, Union
@contextmanager
-def log_exceptions(log: logging.Logger, *, consume: bool = False):
+def log_exceptions(
+ log: logging.Logger,
+ *,
+ consume: bool = False,
+ message: str = "Caught exception",
+ level: int = logging.ERROR,
+ show_traceback: bool = True,
+ exceptions_to_process: Union[Type[BaseException], Tuple[Type[BaseException], ...]] = Exception,
+):
try:
yield
- except Exception as e:
- log.error(f"Caught Exception: {e}. Traceback: {traceback.format_exc()}")
+ except exceptions_to_process as e:
+ message = f"{message}: {type(e).__name__}: {e}"
+ if show_traceback:
+ message += f"\n{traceback.format_exc()}"
+
+ log.log(level, message)
+
if not consume:
raise
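A small usage sketch (illustrative only) of the new keyword options on log_exceptions:

import logging

from chia.util.log_exceptions import log_exceptions

log = logging.getLogger(__name__)

# Log connection-type errors at WARNING without a traceback and swallow them;
# exceptions outside the tuple are not caught and propagate unchanged.
with log_exceptions(
    log,
    consume=True,
    message="Error while talking to peer",
    level=logging.WARNING,
    show_traceback=False,
    exceptions_to_process=(ConnectionError, OSError),
):
    raise ConnectionError("peer went away")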
diff --git a/chia/util/misc.py b/chia/util/misc.py
--- a/chia/util/misc.py
+++ b/chia/util/misc.py
@@ -7,7 +7,7 @@
from typing import Any, Dict, Sequence, Union
from chia.util.errors import InvalidPathError
-from chia.util.ints import uint16
+from chia.util.ints import uint16, uint32, uint64
from chia.util.streamable import Streamable, recurse_jsonify, streamable
@@ -115,3 +115,17 @@ def validate_directory_writable(path: Path) -> None:
else:
termination_signals = [signal.SIGINT, signal.SIGTERM]
sendable_termination_signals = termination_signals
+
+
+@streamable
+@dataclasses.dataclass(frozen=True)
+class UInt32Range(Streamable):
+ start: uint32 = uint32(0)
+ stop: uint32 = uint32(uint32.MAXIMUM_EXCLUSIVE - 1)
+
+
+@streamable
+@dataclasses.dataclass(frozen=True)
+class UInt64Range(Streamable):
+ start: uint64 = uint64(0)
+ stop: uint64 = uint64(uint64.MAXIMUM_EXCLUSIVE - 1)
diff --git a/chia/util/network.py b/chia/util/network.py
--- a/chia/util/network.py
+++ b/chia/util/network.py
@@ -110,7 +110,11 @@ def is_in_network(peer_host: str, networks: Iterable[Union[IPv4Network, IPv6Netw
def is_localhost(peer_host: str) -> bool:
- return peer_host == "127.0.0.1" or peer_host == "localhost" or peer_host == "::1" or peer_host == "0:0:0:0:0:0:0:1"
+ return peer_host in ["127.0.0.1", "localhost", "::1", "0:0:0:0:0:0:0:1"]
+
+
+def is_trusted_peer(host: str, node_id: bytes32, trusted_peers: Dict[str, Any], testing: bool = False) -> bool:
+ return not testing and is_localhost(host) or node_id.hex() in trusted_peers
def class_for_type(type: NodeType) -> Any:
@@ -141,14 +145,14 @@ def class_for_type(type: NodeType) -> Any:
raise ValueError("No class for type")
-def get_host_addr(host: str, *, prefer_ipv6: bool = False) -> IPAddress:
+async def resolve(host: str, *, prefer_ipv6: bool = False) -> IPAddress:
try:
return IPAddress.create(host)
except ValueError:
pass
addrset: List[
Tuple["socket.AddressFamily", "socket.SocketKind", int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]
- ] = socket.getaddrinfo(host, None)
+ ] = await asyncio.get_event_loop().getaddrinfo(host, None)
# The list returned by getaddrinfo is never empty, an exception is thrown or data is returned.
ips_v4 = []
ips_v6 = []
@@ -167,17 +171,6 @@ def get_host_addr(host: str, *, prefer_ipv6: bool = False) -> IPAddress:
raise ValueError(f"failed to resolve {host} into an IP address")
-def is_trusted_inner(peer_host: str, peer_node_id: bytes32, trusted_peers: Dict, testing: bool) -> bool:
- if trusted_peers is None:
- return False
- if not testing and peer_host == "127.0.0.1":
- return True
- if peer_node_id.hex() not in trusted_peers:
- return False
-
- return True
-
-
def select_port(prefer_ipv6: bool, addresses: List[Any]) -> uint16:
selected_port: uint16
for address_string, port, *_ in addresses:
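Since get_host_addr became the coroutine resolve, call sites now await it from within a running event loop; a minimal sketch (the hostname is a placeholder):

import asyncio

from chia.util.network import resolve

async def main() -> None:
    ip = await resolve("localhost", prefer_ipv6=False)
    print(ip)

asyncio.run(main())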
diff --git a/chia/util/pip_import.py b/chia/util/pip_import.py
deleted file mode 100644
--- a/chia/util/pip_import.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"Import a package and install it with PIP if it doesn't exist."
-
-from __future__ import annotations
-
-import subprocess
-import sys
-
-
-def pip_import(module, pypi_name=None):
- """
- Return None if we can't import or install it.
- """
- try:
- return __import__(module)
- except ImportError:
- pass
-
- subprocess.call([sys.executable, "-m", "pip", "install", pypi_name or module])
- return __import__(module)
diff --git a/chia/util/profiler.py b/chia/util/profiler.py
--- a/chia/util/profiler.py
+++ b/chia/util/profiler.py
@@ -158,7 +158,7 @@ def analyze_slot_range(profile_dir: pathlib.Path, first: int, last: int):
async def mem_profile_task(root_path: pathlib.Path, service: str, log: logging.Logger) -> None:
- profile_dir = path_from_root(root_path, f"memory-profile-{service}") / datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
+ profile_dir = path_from_root(root_path, f"memory-profile-{service}") / datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
log.info("Starting memory profiler. saving to %s" % profile_dir)
profile_dir.mkdir(parents=True, exist_ok=True)
diff --git a/chia/util/task_timing.py b/chia/util/task_timing.py
--- a/chia/util/task_timing.py
+++ b/chia/util/task_timing.py
@@ -155,6 +155,9 @@ def get_file(frame: FrameType) -> str:
def trace_fun(frame: FrameType, event: str, arg: Any) -> None:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
if event in ["c_call", "c_return", "c_exception"]:
return
diff --git a/chia/util/validate_alert.py b/chia/util/validate_alert.py
deleted file mode 100644
--- a/chia/util/validate_alert.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from __future__ import annotations
-
-import json
-from pathlib import Path
-
-from blspy import AugSchemeMPL, PublicKeyMPL, SignatureMPL
-
-from chia.util.byte_types import hexstr_to_bytes
-from chia.util.hash import std_hash
-
-
-def validate_alert_file(file_path: Path, pubkey: str) -> bool:
- text = file_path.read_text()
- validated = validate_alert(text, pubkey)
- return validated
-
-
-def validate_alert(text: str, pubkey: str) -> bool:
- json_obj = json.loads(text)
- data = json_obj["data"]
- message = bytes(data, "UTF-8")
- signature = json_obj["signature"]
- signature = SignatureMPL.from_bytes(hexstr_to_bytes(signature))
- pubkey_bls = PublicKeyMPL.from_bytes(hexstr_to_bytes(pubkey))
- sig_match_my = AugSchemeMPL.verify(pubkey_bls, message, signature)
-
- return sig_match_my
-
-
-def create_alert_file(alert_file_path: Path, key, genesis_challenge_preimage: str):
- bytes_preimage = bytes(genesis_challenge_preimage, "UTF-8")
- genesis_challenge = std_hash(bytes_preimage)
- file_dict = {
- "ready": True,
- "genesis_challenge": genesis_challenge.hex(),
- "genesis_challenge_preimage": genesis_challenge_preimage,
- }
- data: str = json.dumps(file_dict)
- signature = AugSchemeMPL.sign(key, bytes(data, "utf-8"))
- file_data = {"data": data, "signature": f"{signature}"}
- file_data_json = json.dumps(file_data)
- alert_file_path.write_text(file_data_json)
-
-
-def create_not_ready_alert_file(alert_file_path: Path, key):
- file_dict = {
- "ready": False,
- }
- data: str = json.dumps(file_dict)
- signature = AugSchemeMPL.sign(key, bytes(data, "utf-8"))
- file_data = {"data": data, "signature": f"{signature}"}
- file_data_json = json.dumps(file_data)
- alert_file_path.write_text(file_data_json)
diff --git a/chia/wallet/cat_wallet/cat_utils.py b/chia/wallet/cat_wallet/cat_utils.py
--- a/chia/wallet/cat_wallet/cat_utils.py
+++ b/chia/wallet/cat_wallet/cat_utils.py
@@ -111,14 +111,11 @@ def unsigned_spend_bundle_for_spendable_cats(mod_code: Program, spendable_cat_li
# figure out what the deltas are by running the inner puzzles & solutions
deltas: List[int] = []
for spend_info in spendable_cat_list:
- error, conditions, cost = conditions_dict_for_solution(
- spend_info.inner_puzzle, spend_info.inner_solution, INFINITE_COST
- )
+ conditions = conditions_dict_for_solution(spend_info.inner_puzzle, spend_info.inner_solution, INFINITE_COST)
total = spend_info.extra_delta * -1
- if conditions:
- for _ in conditions.get(ConditionOpcode.CREATE_COIN, []):
- if _.vars[1] != b"\x8f": # -113 in bytes
- total += Program.to(_.vars[1]).as_int()
+ for _ in conditions.get(ConditionOpcode.CREATE_COIN, []):
+ if _.vars[1] != b"\x8f": # -113 in bytes
+ total += Program.to(_.vars[1]).as_int()
deltas.append(spend_info.coin.amount - total)
if sum(deltas) != 0:
diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py
--- a/chia/wallet/cat_wallet/cat_wallet.py
+++ b/chia/wallet/cat_wallet/cat_wallet.py
@@ -5,7 +5,7 @@
import time
import traceback
from secrets import token_bytes
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast
from blspy import AugSchemeMPL, G1Element, G2Element
@@ -51,7 +51,8 @@
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.curry_and_treehash import calculate_hash_of_quoted_mod_hash, curry_and_treehash
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend_for_coin_state
+from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
@@ -67,6 +68,11 @@
class CATWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol] = cast("CATWallet", None)
+
wallet_state_manager: WalletStateManager
log: logging.Logger
wallet_info: WalletInfo
@@ -139,11 +145,10 @@ async def create_new_cat_wallet(
cat_coin = None
puzzle_store = self.wallet_state_manager.puzzle_store
for c in non_ephemeral_coins:
- info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
- if info is None:
+ wallet_identifier = await puzzle_store.get_wallet_identifier_for_puzzle_hash(c.puzzle_hash)
+ if wallet_identifier is None:
raise ValueError("Internal Error")
- id, wallet_type = info
- if id == self.id():
+ if wallet_identifier.id == self.id():
cat_coin = c
if cat_coin is None:
@@ -348,9 +353,7 @@ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection) -
[coin.parent_coin_info], peer=peer
)
assert coin_state[0].coin.name() == coin.parent_coin_info
- coin_spend = await self.wallet_state_manager.wallet_node.fetch_puzzle_solution(
- coin_state[0].spent_height, coin_state[0].coin, peer
- )
+ coin_spend = await fetch_coin_spend_for_coin_state(coin_state[0], peer)
await self.puzzle_solution_received(coin_spend, parent_coin=coin_state[0].coin)
except Exception as e:
self.log.debug(f"Exception: {e}, traceback: {traceback.format_exc()}")
@@ -507,21 +510,20 @@ async def sign(self, spend_bundle: SpendBundle) -> SpendBundle:
raise RuntimeError(f"Failed to get keys for puzzle_hash {puzzle_hash}")
pubkey, private = ret
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
- error, conditions, cost = conditions_dict_for_solution(
+ conditions = conditions_dict_for_solution(
spend.puzzle_reveal.to_program(),
spend.solution.to_program(),
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
)
- if conditions is not None:
- synthetic_pk = synthetic_secret_key.get_g1()
- for pk, msg in pkm_pairs_for_conditions_dict(
- conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
- ):
- try:
- assert bytes(synthetic_pk) == pk
- sigs.append(AugSchemeMPL.sign(synthetic_secret_key, msg))
- except AssertionError:
- raise ValueError("This spend bundle cannot be signed by the CAT wallet")
+ synthetic_pk = synthetic_secret_key.get_g1()
+ for pk, msg in pkm_pairs_for_conditions_dict(
+ conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
+ ):
+ try:
+ assert bytes(synthetic_pk) == pk
+ sigs.append(AugSchemeMPL.sign(synthetic_secret_key, msg))
+ except AssertionError:
+ raise ValueError("This spend bundle cannot be signed by the CAT wallet")
agg_sig = AugSchemeMPL.aggregate(sigs)
return SpendBundle.aggregate([spend_bundle, SpendBundle([], agg_sig)])
@@ -688,9 +690,7 @@ async def generate_unsigned_spendbundle(
# Calculate standard puzzle solutions
change = selected_cat_amount - starting_amount
- primaries: List[AmountWithPuzzlehash] = []
- for payment in payments:
- primaries.append({"puzzlehash": payment.puzzle_hash, "amount": payment.amount, "memos": payment.memos})
+ primaries = payments.copy()
if change > 0:
derivation_record = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
@@ -705,7 +705,7 @@ async def generate_unsigned_spendbundle(
break
else:
change_puzhash = await self.get_new_inner_hash()
- primaries.append({"puzzlehash": change_puzhash, "amount": uint64(change), "memos": []})
+ primaries.append(Payment(change_puzhash, uint64(change), [change_puzhash]))
# Loop through the coins we've selected and gather the information we need to spend them
spendable_cat_list = []
@@ -930,9 +930,3 @@ async def get_coins_to_offer(
if balance < amount:
raise Exception(f"insufficient funds in wallet {self.id()}")
return await self.select_coins(amount, min_coin_amount=min_coin_amount, max_coin_amount=max_coin_amount)
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = CATWallet()
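The `_protocol_check: ClassVar[WalletProtocol] = cast("CATWallet", None)` line added above is a static-typing idiom: because the assignment sits inside `if TYPE_CHECKING:` it costs nothing at runtime, but it makes a type checker verify that the class structurally satisfies the protocol. A minimal sketch of the same idiom with made-up names (SupportsName and MyWallet are illustrative, not from the repository):

from typing import TYPE_CHECKING, ClassVar, Protocol, cast


class SupportsName(Protocol):
    def get_name(self) -> str:
        ...


class MyWallet:
    if TYPE_CHECKING:
        # Evaluated only by the type checker; mypy reports an error here if
        # MyWallet ever stops implementing SupportsName.
        _protocol_check: ClassVar[SupportsName] = cast("MyWallet", None)

    def get_name(self) -> str:
        return "my wallet"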
diff --git a/chia/wallet/coin_selection.py b/chia/wallet/coin_selection.py
--- a/chia/wallet/coin_selection.py
+++ b/chia/wallet/coin_selection.py
@@ -1,190 +1,191 @@
-from __future__ import annotations
-
-import logging
-import random
-from typing import Dict, List, Optional, Set
-
-from chia.types.blockchain_format.coin import Coin
-from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.util.ints import uint64, uint128
-from chia.wallet.wallet_coin_record import WalletCoinRecord
-
-
-async def select_coins(
- spendable_amount: uint128,
- max_coin_amount: uint64,
- spendable_coins: List[WalletCoinRecord],
- unconfirmed_removals: Dict[bytes32, Coin],
- log: logging.Logger,
- amount: uint128,
- exclude: Optional[List[Coin]] = None,
- min_coin_amount: Optional[uint64] = None,
- excluded_coin_amounts: Optional[List[uint64]] = None,
-) -> Set[Coin]:
- """
- Returns a set of coins that can be used for generating a new transaction.
- """
- if exclude is None:
- exclude = []
- if min_coin_amount is None:
- min_coin_amount = uint64(0)
- if excluded_coin_amounts is None:
- excluded_coin_amounts = []
-
- if amount > spendable_amount:
- error_msg = (
- f"Can't select amount higher than our spendable balance. Amount: {amount}, spendable: {spendable_amount}"
- )
- log.warning(error_msg)
- raise ValueError(error_msg)
-
- log.debug(f"About to select coins for amount {amount}")
-
- max_num_coins = 500
- sum_spendable_coins = 0
- valid_spendable_coins: List[Coin] = []
-
- for coin_record in spendable_coins: # remove all the unconfirmed coins, excluded coins and dust.
- if coin_record.coin.name() in unconfirmed_removals:
- continue
- if coin_record.coin in exclude:
- continue
- if coin_record.coin.amount < min_coin_amount or coin_record.coin.amount > max_coin_amount:
- continue
- if coin_record.coin.amount in excluded_coin_amounts:
- continue
- valid_spendable_coins.append(coin_record.coin)
- sum_spendable_coins += coin_record.coin.amount
-
- # This happens when we couldn't use one of the coins because it's already used
- # but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
- if sum_spendable_coins < amount:
- raise ValueError(
- f"Transaction for {amount} is greater than spendable balance of {sum_spendable_coins}. "
- "There may be other transactions pending or our minimum coin amount is too high."
- )
- if amount == 0 and sum_spendable_coins == 0:
- raise ValueError(
- "No coins available to spend, you can not create a coin with an amount of 0,"
- " without already having coins."
- )
-
- # Sort the coins by amount
- valid_spendable_coins.sort(reverse=True, key=lambda r: r.amount)
-
- # check for exact 1 to 1 coin match.
- exact_match_coin: Optional[Coin] = check_for_exact_match(valid_spendable_coins, uint64(amount))
- if exact_match_coin:
- log.debug(f"selected coin with an exact match: {exact_match_coin}")
- return {exact_match_coin}
-
- # Check for an exact match with all of the coins smaller than the amount.
- # If we have more, smaller coins than the amount we run the next algorithm.
- smaller_coin_sum = 0 # coins smaller than target.
- smaller_coins: List[Coin] = []
- for coin in valid_spendable_coins:
- if coin.amount < amount:
- smaller_coin_sum += coin.amount
- smaller_coins.append(coin)
- if smaller_coin_sum == amount and len(smaller_coins) < max_num_coins and amount != 0:
- log.debug(f"Selected all smaller coins because they equate to an exact match of the target.: {smaller_coins}")
- return set(smaller_coins)
- elif smaller_coin_sum < amount:
- smallest_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
- assert smallest_coin is not None # Since we know we have enough, there must be a larger coin
- log.debug(f"Selected closest greater coin: {smallest_coin.name()}")
- return {smallest_coin}
- elif smaller_coin_sum > amount:
- coin_set: Optional[Set[Coin]] = knapsack_coin_algorithm(smaller_coins, amount, max_coin_amount, max_num_coins)
- log.debug(f"Selected coins from knapsack algorithm: {coin_set}")
- if coin_set is None:
- coin_set = sum_largest_coins(amount, smaller_coins)
- if coin_set is None or len(coin_set) > max_num_coins:
- greater_coin = select_smallest_coin_over_target(amount, valid_spendable_coins)
- if greater_coin is None:
- raise ValueError(
- f"Transaction of {amount} mojo would use more than "
- f"{max_num_coins} coins. Try sending a smaller amount"
- )
- coin_set = {greater_coin}
- return coin_set
- else:
- # if smaller_coin_sum == amount and (len(smaller_coins) >= max_num_coins or amount == 0)
- potential_large_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
- if potential_large_coin is None:
- raise ValueError("Too many coins are required to make this transaction")
- log.debug(f"Resorted to selecting smallest coin over target due to dust.: {potential_large_coin}")
- return {potential_large_coin}
-
-
-# These algorithms were based off of the algorithms in:
-# https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf
-
-# we use this to check if one of the coins exactly matches the target.
-def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coin]:
- for coin in coin_list:
- if coin.amount == target:
- return coin
- return None
-
-
-# amount of coins smaller than target, followed by a list of all valid spendable coins.
-# Coins must be sorted in descending amount order.
-def select_smallest_coin_over_target(target: uint128, sorted_coin_list: List[Coin]) -> Optional[Coin]:
- if sorted_coin_list[0].amount < target:
- return None
- for coin in reversed(sorted_coin_list):
- if coin.amount >= target:
- return coin
- assert False # Should never reach here
-
-
-# we use this to find the set of coins which have total value closest to the target, but at least the target.
-# IMPORTANT: The coins have to be sorted in descending order or else this function will not work.
-def knapsack_coin_algorithm(
- smaller_coins: List[Coin], target: uint128, max_coin_amount: int, max_num_coins: int, seed: bytes = b"knapsack seed"
-) -> Optional[Set[Coin]]:
- best_set_sum = max_coin_amount
- best_set_of_coins: Optional[Set[Coin]] = None
- ran: random.Random = random.Random()
- ran.seed(seed)
- for i in range(1000):
- # reset these variables every loop.
- selected_coins: Set[Coin] = set()
- selected_coins_sum = 0
- n_pass = 0
- target_reached = False
- while n_pass < 2 and not target_reached:
- for coin in smaller_coins:
- # run 2 passes where the first pass may select a coin 50% of the time.
- # the second pass runs to finish the set if the first pass didn't finish the set.
- # this makes each trial random and increases the chance of getting a perfect set.
- if (n_pass == 0 and bool(ran.getrandbits(1))) or (n_pass == 1 and coin not in selected_coins):
- if len(selected_coins) > max_num_coins:
- break
- selected_coins_sum += coin.amount
- selected_coins.add(coin)
- if selected_coins_sum == target:
- return selected_coins
- if selected_coins_sum > target:
- target_reached = True
- if selected_coins_sum < best_set_sum:
- best_set_of_coins = selected_coins.copy()
- best_set_sum = selected_coins_sum
- selected_coins_sum -= coin.amount
- selected_coins.remove(coin)
- n_pass += 1
- return best_set_of_coins
-
-
-# Adds up the largest coins in the list, resulting in the minimum number of selected coins. A solution
-# is guaranteed if and only if the sum(coins) >= target. Coins must be sorted in descending amount order.
-def sum_largest_coins(target: uint128, sorted_coins: List[Coin]) -> Optional[Set[Coin]]:
- total_value = 0
- selected_coins: Set[Coin] = set()
- for coin in sorted_coins:
- total_value += coin.amount
- selected_coins.add(coin)
- if total_value >= target:
- return selected_coins
- return None
+from __future__ import annotations
+
+import logging
+import random
+from typing import Dict, List, Optional, Set
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.ints import uint64, uint128
+from chia.wallet.wallet_coin_record import WalletCoinRecord
+
+
+async def select_coins(
+ spendable_amount: uint128,
+ max_coin_amount: uint64,
+ spendable_coins: List[WalletCoinRecord],
+ unconfirmed_removals: Dict[bytes32, Coin],
+ log: logging.Logger,
+ amount: uint128,
+ exclude: Optional[List[Coin]] = None,
+ min_coin_amount: Optional[uint64] = None,
+ excluded_coin_amounts: Optional[List[uint64]] = None,
+) -> Set[Coin]:
+ """
+ Returns a set of coins that can be used for generating a new transaction.
+ """
+ if exclude is None:
+ exclude = []
+ if min_coin_amount is None:
+ min_coin_amount = uint64(0)
+ if excluded_coin_amounts is None:
+ excluded_coin_amounts = []
+
+ if amount > spendable_amount:
+ error_msg = (
+ f"Can't select amount higher than our spendable balance. Amount: {amount}, spendable: {spendable_amount}"
+ )
+ log.warning(error_msg)
+ raise ValueError(error_msg)
+
+ log.debug(f"About to select coins for amount {amount}")
+
+ max_num_coins = 500
+ sum_spendable_coins = 0
+ valid_spendable_coins: List[Coin] = []
+
+ for coin_record in spendable_coins: # remove all the unconfirmed coins, excluded coins and dust.
+ if coin_record.coin.name() in unconfirmed_removals:
+ continue
+ if coin_record.coin in exclude:
+ continue
+ if coin_record.coin.amount < min_coin_amount or coin_record.coin.amount > max_coin_amount:
+ continue
+ if coin_record.coin.amount in excluded_coin_amounts:
+ continue
+ valid_spendable_coins.append(coin_record.coin)
+ sum_spendable_coins += coin_record.coin.amount
+
+ # This happens when we couldn't use one of the coins because it's already used
+ # but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
+ if sum_spendable_coins < amount:
+ raise ValueError(
+ f"Transaction for {amount} is greater than spendable balance of {sum_spendable_coins}. "
+ "There may be other transactions pending or our minimum coin amount is too high."
+ )
+ if amount == 0 and sum_spendable_coins == 0:
+ raise ValueError(
+ "No coins available to spend, you can not create a coin with an amount of 0,"
+ " without already having coins."
+ )
+
+ # Sort the coins by amount
+ valid_spendable_coins.sort(reverse=True, key=lambda r: r.amount)
+
+ # check for exact 1 to 1 coin match.
+ exact_match_coin: Optional[Coin] = check_for_exact_match(valid_spendable_coins, uint64(amount))
+ if exact_match_coin:
+ log.debug(f"selected coin with an exact match: {exact_match_coin}")
+ return {exact_match_coin}
+
+ # Check for an exact match with all of the coins smaller than the amount.
+ # If we have more, smaller coins than the amount we run the next algorithm.
+ smaller_coin_sum = 0 # coins smaller than target.
+ smaller_coins: List[Coin] = []
+ for coin in valid_spendable_coins:
+ if coin.amount < amount:
+ smaller_coin_sum += coin.amount
+ smaller_coins.append(coin)
+ if smaller_coin_sum == amount and len(smaller_coins) < max_num_coins and amount != 0:
+ log.debug(f"Selected all smaller coins because they equate to an exact match of the target.: {smaller_coins}")
+ return set(smaller_coins)
+ elif smaller_coin_sum < amount:
+ smallest_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
+ assert smallest_coin is not None # Since we know we have enough, there must be a larger coin
+ log.debug(f"Selected closest greater coin: {smallest_coin.name()}")
+ return {smallest_coin}
+ elif smaller_coin_sum > amount:
+ coin_set: Optional[Set[Coin]] = knapsack_coin_algorithm(smaller_coins, amount, max_coin_amount, max_num_coins)
+ log.debug(f"Selected coins from knapsack algorithm: {coin_set}")
+ if coin_set is None:
+ coin_set = sum_largest_coins(amount, smaller_coins)
+ if coin_set is None or len(coin_set) > max_num_coins:
+ greater_coin = select_smallest_coin_over_target(amount, valid_spendable_coins)
+ if greater_coin is None:
+ raise ValueError(
+ f"Transaction of {amount} mojo would use more than "
+ f"{max_num_coins} coins. Try sending a smaller amount"
+ )
+ coin_set = {greater_coin}
+ return coin_set
+ else:
+ # if smaller_coin_sum == amount and (len(smaller_coins) >= max_num_coins or amount == 0)
+ potential_large_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
+ if potential_large_coin is None:
+ raise ValueError("Too many coins are required to make this transaction")
+ log.debug(f"Resorted to selecting smallest coin over target due to dust.: {potential_large_coin}")
+ return {potential_large_coin}
+
+
+# These algorithms were based off of the algorithms in:
+# https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf
+
+
+# we use this to check if one of the coins exactly matches the target.
+def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coin]:
+ for coin in coin_list:
+ if coin.amount == target:
+ return coin
+ return None
+
+
+# amount of coins smaller than target, followed by a list of all valid spendable coins.
+# Coins must be sorted in descending amount order.
+def select_smallest_coin_over_target(target: uint128, sorted_coin_list: List[Coin]) -> Optional[Coin]:
+ if sorted_coin_list[0].amount < target:
+ return None
+ for coin in reversed(sorted_coin_list):
+ if coin.amount >= target:
+ return coin
+ assert False # Should never reach here
+
+
+# we use this to find the set of coins which have total value closest to the target, but at least the target.
+# IMPORTANT: The coins have to be sorted in descending order or else this function will not work.
+def knapsack_coin_algorithm(
+ smaller_coins: List[Coin], target: uint128, max_coin_amount: int, max_num_coins: int, seed: bytes = b"knapsack seed"
+) -> Optional[Set[Coin]]:
+ best_set_sum = max_coin_amount
+ best_set_of_coins: Optional[Set[Coin]] = None
+ ran: random.Random = random.Random()
+ ran.seed(seed)
+ for i in range(1000):
+ # reset these variables every loop.
+ selected_coins: Set[Coin] = set()
+ selected_coins_sum = 0
+ n_pass = 0
+ target_reached = False
+ while n_pass < 2 and not target_reached:
+ for coin in smaller_coins:
+ # run 2 passes where the first pass may select a coin 50% of the time.
+ # the second pass runs to finish the set if the first pass didn't finish the set.
+ # this makes each trial random and increases the chance of getting a perfect set.
+ if (n_pass == 0 and bool(ran.getrandbits(1))) or (n_pass == 1 and coin not in selected_coins):
+ if len(selected_coins) > max_num_coins:
+ break
+ selected_coins_sum += coin.amount
+ selected_coins.add(coin)
+ if selected_coins_sum == target:
+ return selected_coins
+ if selected_coins_sum > target:
+ target_reached = True
+ if selected_coins_sum < best_set_sum:
+ best_set_of_coins = selected_coins.copy()
+ best_set_sum = selected_coins_sum
+ selected_coins_sum -= coin.amount
+ selected_coins.remove(coin)
+ n_pass += 1
+ return best_set_of_coins
+
+
+# Adds up the largest coins in the list, resulting in the minimum number of selected coins. A solution
+# is guaranteed if and only if the sum(coins) >= target. Coins must be sorted in descending amount order.
+def sum_largest_coins(target: uint128, sorted_coins: List[Coin]) -> Optional[Set[Coin]]:
+ total_value = 0
+ selected_coins: Set[Coin] = set()
+ for coin in sorted_coins:
+ total_value += coin.amount
+ selected_coins.add(coin)
+ if total_value >= target:
+ return selected_coins
+ return None
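The file above keeps its inline commentary describing the selection order: an exact single-coin match, an exact match from all of the smaller coins, then the randomized knapsack passes adapted from the Erhardt paper cited in the comments. The following standalone sketch reproduces that order over plain integers; it is illustrative only, is not part of the patch, and omits the dust, exclusion, and max-coin-count handling of the real select_coins.

import random
from typing import List, Optional


def pick_amounts(amounts: List[int], target: int, trials: int = 1000) -> Optional[List[int]]:
    amounts = sorted(amounts, reverse=True)
    if target in amounts:                       # exact single-coin match
        return [target]
    smaller = [a for a in amounts if a < target]
    if sum(smaller) == target:                  # all smaller coins are an exact match
        return smaller
    best: Optional[List[int]] = None
    rng = random.Random("knapsack seed")
    for _ in range(trials):                     # randomized knapsack-style passes
        chosen: List[int] = []
        total = 0
        for a in smaller:
            if rng.getrandbits(1):
                chosen.append(a)
                total += a
            if total >= target:
                break
        if total >= target and (best is None or total < sum(best)):
            best = chosen
    return best                                 # None means no subset reached the target


print(pick_amounts([14, 9, 5, 3, 2], 11))       # e.g. [9, 2]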
diff --git a/chia/wallet/db_wallet/db_wallet_puzzles.py b/chia/wallet/db_wallet/db_wallet_puzzles.py
--- a/chia/wallet/db_wallet/db_wallet_puzzles.py
+++ b/chia/wallet/db_wallet/db_wallet_puzzles.py
@@ -14,10 +14,10 @@
ACS_MU = Program.to(11) # returns the third argument a.k.a the full solution
ACS_MU_PH = ACS_MU.get_tree_hash()
-SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clvm")
-SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clvm")
-GRAFTROOT_DL_OFFERS = load_clvm_maybe_recompile("graftroot_dl_offers.clvm")
-P2_PARENT = load_clvm_maybe_recompile("p2_parent.clvm")
+SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clsp")
+SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clsp")
+GRAFTROOT_DL_OFFERS = load_clvm_maybe_recompile("graftroot_dl_offers.clsp")
+P2_PARENT = load_clvm_maybe_recompile("p2_parent.clsp")
def create_host_fullpuz(innerpuz: Union[Program, bytes32], current_root: bytes32, genesis_id: bytes32) -> Program:
diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py
--- a/chia/wallet/did_wallet/did_wallet.py
+++ b/chia/wallet/did_wallet/did_wallet.py
@@ -6,13 +6,10 @@
import re
import time
from secrets import token_bytes
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast
from blspy import AugSchemeMPL, G1Element, G2Element
-import chia.wallet.singleton
-from chia.full_node.full_node_api import FullNodeAPI
-from chia.protocols import wallet_protocol
from chia.protocols.wallet_protocol import CoinState
from chia.server.ws_connection import WSChiaConnection
from chia.types.announcement import Announcement
@@ -30,16 +27,23 @@
from chia.wallet.did_wallet.did_info import DIDInfo
from chia.wallet.did_wallet.did_wallet_puzzles import uncurry_innerpuz
from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.payment import Payment
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
puzzle_for_pk,
puzzle_hash_for_pk,
)
-from chia.wallet.singleton import create_fullpuz
+from chia.wallet.singleton import (
+ SINGLETON_LAUNCHER_PUZZLE,
+ create_singleton_puzzle,
+ create_singleton_puzzle_hash,
+ get_inner_puzzle_from_singleton,
+)
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend, fetch_coin_spend_for_coin_state
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX, Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
@@ -47,6 +51,11 @@
class DIDWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol] = cast("DIDWallet", None)
+
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
@@ -377,14 +386,8 @@ async def coin_added(self, coin: Coin, _: uint32, peer: WSChiaConnection):
parent_state: CoinState = (
await self.wallet_state_manager.wallet_node.get_coin_state([coin.parent_coin_info], peer=peer)
)[0]
- assert parent_state.spent_height is not None
- puzzle_solution_request = wallet_protocol.RequestPuzzleSolution(
- coin.parent_coin_info, uint32(parent_state.spent_height)
- )
- response = await peer.call_api(FullNodeAPI.request_puzzle_solution, puzzle_solution_request)
- req_puz_sol = response.response
- assert req_puz_sol.puzzle is not None
- parent_innerpuz = chia.wallet.singleton.get_innerpuzzle_from_puzzle(req_puz_sol.puzzle.to_program())
+ response = await fetch_coin_spend_for_coin_state(parent_state, peer)
+ parent_innerpuz = get_inner_puzzle_from_singleton(response.puzzle_reveal.to_program())
if parent_innerpuz:
parent_info = LineageProof(
parent_state.coin.parent_coin_info,
@@ -403,7 +406,7 @@ async def coin_added(self, coin: Coin, _: uint32, peer: WSChiaConnection):
# TODO: if not the first singleton, and solution mode == recovery
if not self._coin_is_first_singleton(coin):
- full_puzzle = create_fullpuz(inner_puzzle, self.did_info.origin_coin.name())
+ full_puzzle = create_singleton_puzzle(inner_puzzle, self.did_info.origin_coin.name())
assert full_puzzle.get_tree_hash() == coin.puzzle_hash
if self.did_info.temp_coin is not None:
self.wallet_state_manager.state_changed("did_coin_added", self.wallet_info.id)
@@ -470,12 +473,9 @@ async def load_parent(self, did_info: DIDInfo):
did_wallet_puzzles.metadata_to_program(json.loads(self.did_info.metadata)),
)
wallet_node = self.wallet_state_manager.wallet_node
- peer: WSChiaConnection = wallet_node.get_full_node_peer()
- if peer is None:
- raise ValueError("Could not find any peers to request puzzle and solution from")
-
parent_coin: Coin = did_info.origin_coin
while True:
+ peer = wallet_node.get_full_node_peer()
children = await wallet_node.fetch_children(parent_coin.name(), peer)
if len(children) == 0:
break
@@ -504,11 +504,9 @@ async def load_parent(self, did_info: DIDInfo):
await self.save_info(did_info)
assert children_state.created_height
- parent_spend = await wallet_node.fetch_puzzle_solution(children_state.created_height, parent_coin, peer)
+ parent_spend = await fetch_coin_spend(uint32(children_state.created_height), parent_coin, peer)
assert parent_spend is not None
- parent_innerpuz = chia.wallet.singleton.get_innerpuzzle_from_puzzle(
- parent_spend.puzzle_reveal.to_program()
- )
+ parent_innerpuz = get_inner_puzzle_from_singleton(parent_spend.puzzle_reveal.to_program())
assert parent_innerpuz is not None
parent_info = LineageProof(
parent_coin.parent_coin_info,
@@ -519,32 +517,6 @@ async def load_parent(self, did_info: DIDInfo):
parent_coin = child_coin
assert parent_info is not None
- async def create_tandem_xch_tx(
- self,
- fee: uint64,
- announcement_to_assert: Optional[Announcement] = None,
- reuse_puzhash: Optional[bool] = None,
- ) -> TransactionRecord:
- chia_coins = await self.standard_wallet.select_coins(fee)
- if reuse_puzhash is None:
- reuse_puzhash_config = self.wallet_state_manager.config.get("reuse_public_key_for_change", None)
- if reuse_puzhash_config is None:
- reuse_puzhash = False
- else:
- reuse_puzhash = reuse_puzhash_config.get(
- str(self.wallet_state_manager.wallet_node.logged_in_fingerprint), False
- )
- chia_tx = await self.standard_wallet.generate_signed_transaction(
- uint64(0),
- (await self.standard_wallet.get_puzzle_hash(not reuse_puzhash)),
- fee=fee,
- coins=chia_coins,
- coin_announcements_to_consume={announcement_to_assert} if announcement_to_assert is not None else None,
- reuse_puzhash=reuse_puzhash,
- )
- assert chia_tx.spend_bundle is not None
- return chia_tx
-
def puzzle_for_pk(self, pubkey: G1Element) -> Program:
if self.did_info.origin_coin is not None:
innerpuz = did_wallet_puzzles.create_innerpuz(
@@ -554,10 +526,10 @@ def puzzle_for_pk(self, pubkey: G1Element) -> Program:
self.did_info.origin_coin.name(),
did_wallet_puzzles.metadata_to_program(json.loads(self.did_info.metadata)),
)
- return chia.wallet.singleton.create_fullpuz(innerpuz, self.did_info.origin_coin.name())
+ return create_singleton_puzzle(innerpuz, self.did_info.origin_coin.name())
else:
innerpuz = Program.to((8, 0))
- return chia.wallet.singleton.create_fullpuz(innerpuz, bytes32([0] * 32))
+ return create_singleton_puzzle(innerpuz, bytes32([0] * 32))
def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
if self.did_info.origin_coin is None:
@@ -571,7 +543,7 @@ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
origin_coin_name,
did_wallet_puzzles.metadata_to_program(json.loads(self.did_info.metadata)),
)
- return chia.wallet.singleton.create_fullpuz_hash(innerpuz_hash, origin_coin_name)
+ return create_singleton_puzzle_hash(innerpuz_hash, origin_coin_name)
async def get_new_puzzle(self) -> Program:
return self.puzzle_for_pk(
@@ -606,20 +578,14 @@ async def create_update_spend(self, fee: uint64 = uint64(0), reuse_puzhash: Opti
p2_puzzle = uncurried[0]
# innerpuz solution is (mode, p2_solution)
p2_solution = self.standard_wallet.make_solution(
- primaries=[
- {
- "puzzlehash": new_inner_puzzle.get_tree_hash(),
- "amount": uint64(coin.amount),
- "memos": [p2_puzzle.get_tree_hash()],
- }
- ],
+ primaries=[Payment(new_inner_puzzle.get_tree_hash(), uint64(coin.amount), [p2_puzzle.get_tree_hash()])],
coin_announcements={coin.name()},
)
innersol: Program = Program.to([1, p2_solution])
# full solution is (corehash parent_info my_amount innerpuz_reveal solution)
innerpuz: Program = self.did_info.current_inner
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
innerpuz,
self.did_info.origin_coin.name(),
)
@@ -637,7 +603,7 @@ async def create_update_spend(self, fee: uint64 = uint64(0), reuse_puzhash: Opti
]
)
# Create an additional spend to confirm the change on-chain
- new_full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ new_full_puzzle: Program = create_singleton_puzzle(
new_inner_puzzle,
self.did_info.origin_coin.name(),
)
@@ -658,7 +624,7 @@ async def create_update_spend(self, fee: uint64 = uint64(0), reuse_puzhash: Opti
spend_bundle = await self.sign(unsigned_spend_bundle)
if fee > 0:
announcement_to_make = coin.name()
- chia_tx = await self.create_tandem_xch_tx(
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(
fee, Announcement(coin.name(), announcement_to_make), reuse_puzhash=reuse_puzhash
)
else:
@@ -722,13 +688,7 @@ async def transfer_did(
did_wallet_puzzles.metadata_to_program(json.loads(self.did_info.metadata)),
)
p2_solution = self.standard_wallet.make_solution(
- primaries=[
- {
- "puzzlehash": new_did_puzhash,
- "amount": uint64(coin.amount),
- "memos": [new_puzhash],
- }
- ],
+ primaries=[Payment(new_did_puzhash, uint64(coin.amount), [new_puzhash])],
coin_announcements={coin.name()},
)
# Need to include backup list reveal here, even we are don't recover
@@ -739,7 +699,7 @@ async def transfer_did(
innersol = Program.to([2, p2_solution, [], [], [], self.did_info.backup_ids])
# full solution is (corehash parent_info my_amount innerpuz_reveal solution)
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
self.did_info.current_inner,
self.did_info.origin_coin.name(),
)
@@ -761,7 +721,7 @@ async def transfer_did(
spend_bundle = await self.sign(unsigned_spend_bundle)
if fee > 0:
announcement_to_make = coin.name()
- chia_tx = await self.create_tandem_xch_tx(
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(
fee, Announcement(coin.name(), announcement_to_make), reuse_puzhash=reuse_puzhash
)
else:
@@ -796,6 +756,8 @@ async def create_message_spend(
self,
coin_announcements: Optional[Set[bytes]] = None,
puzzle_announcements: Optional[Set[bytes]] = None,
+ coin_announcements_to_assert: Optional[Set[Announcement]] = None,
+ puzzle_announcements_to_assert: Optional[Set[Announcement]] = None,
new_innerpuzzle: Optional[Program] = None,
):
assert self.did_info.current_inner is not None
@@ -811,21 +773,21 @@ async def create_message_spend(
assert uncurried is not None
p2_puzzle = uncurried[0]
p2_solution = self.standard_wallet.make_solution(
- primaries=[
- {
- "puzzlehash": new_innerpuzzle.get_tree_hash(),
- "amount": uint64(coin.amount),
- "memos": [p2_puzzle.get_tree_hash()],
- }
- ],
+ primaries=[Payment(new_innerpuzzle.get_tree_hash(), uint64(coin.amount), [p2_puzzle.get_tree_hash()])],
puzzle_announcements=puzzle_announcements,
coin_announcements=coin_announcements,
+ coin_announcements_to_assert={a.name() for a in coin_announcements_to_assert}
+ if coin_announcements_to_assert is not None
+ else None,
+ puzzle_announcements_to_assert={a.name() for a in puzzle_announcements_to_assert}
+ if puzzle_announcements_to_assert is not None
+ else None,
)
# innerpuz solution is (mode p2_solution)
innersol: Program = Program.to([1, p2_solution])
# full solution is (corehash parent_info my_amount innerpuz_reveal solution)
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
innerpuz,
self.did_info.origin_coin.name(),
)
@@ -860,7 +822,7 @@ async def create_exit_spend(self, puzhash: bytes32):
# full solution is (corehash parent_info my_amount innerpuz_reveal solution)
innerpuz: Program = self.did_info.current_inner
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
innerpuz,
self.did_info.origin_coin.name(),
)
@@ -928,18 +890,14 @@ async def create_attestment(
# innerpuz solution is (mode, p2_solution)
p2_solution = self.standard_wallet.make_solution(
primaries=[
- {
- "puzzlehash": innerpuz.get_tree_hash(),
- "amount": uint64(coin.amount),
- "memos": [p2_puzzle.get_tree_hash()],
- },
- {"puzzlehash": innermessage, "amount": uint64(0), "memos": []},
+ Payment(innerpuz.get_tree_hash(), uint64(coin.amount), [p2_puzzle.get_tree_hash()]),
+ Payment(innermessage, uint64(0)),
],
)
innersol = Program.to([1, p2_solution])
# full solution is (corehash parent_info my_amount innerpuz_reveal solution)
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
innerpuz,
self.did_info.origin_coin.name(),
)
@@ -1056,7 +1014,7 @@ async def recovery_spend(
# full solution is (parent_info my_amount solution)
assert self.did_info.current_inner is not None
innerpuz: Program = self.did_info.current_inner
- full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ full_puzzle: Program = create_singleton_puzzle(
innerpuz,
self.did_info.origin_coin.name(),
)
@@ -1237,22 +1195,20 @@ async def sign(self, spend_bundle: SpendBundle) -> SpendBundle:
puzzle_hash = p2_puzzle.get_tree_hash()
pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
- error, conditions, cost = conditions_dict_for_solution(
+ conditions = conditions_dict_for_solution(
spend.puzzle_reveal.to_program(),
spend.solution.to_program(),
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
)
-
- if conditions is not None:
- synthetic_pk = synthetic_secret_key.get_g1()
- for pk, msg in pkm_pairs_for_conditions_dict(
- conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
- ):
- try:
- assert bytes(synthetic_pk) == pk
- sigs.append(AugSchemeMPL.sign(synthetic_secret_key, msg))
- except AssertionError:
- raise ValueError("This spend bundle cannot be signed by the DID wallet")
+ synthetic_pk = synthetic_secret_key.get_g1()
+ for pk, msg in pkm_pairs_for_conditions_dict(
+ conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
+ ):
+ try:
+ assert bytes(synthetic_pk) == pk
+ sigs.append(AugSchemeMPL.sign(synthetic_secret_key, msg))
+ except AssertionError:
+ raise ValueError("This spend bundle cannot be signed by the DID wallet")
agg_sig = AugSchemeMPL.aggregate(sigs)
return SpendBundle.aggregate([spend_bundle, SpendBundle([], agg_sig)])
@@ -1267,12 +1223,12 @@ async def generate_new_decentralised_id(self, amount: uint64, fee: uint64 = uint
return None
origin = coins.copy().pop()
- genesis_launcher_puz = chia.wallet.singleton.LAUNCHER_PUZZLE
+ genesis_launcher_puz = SINGLETON_LAUNCHER_PUZZLE
launcher_coin = Coin(origin.name(), genesis_launcher_puz.get_tree_hash(), amount)
did_inner: Program = await self.get_new_did_innerpuz(launcher_coin.name())
did_inner_hash = did_inner.get_tree_hash()
- did_full_puz = chia.wallet.singleton.create_fullpuz(did_inner, launcher_coin.name())
+ did_full_puz = create_singleton_puzzle(did_inner, launcher_coin.name())
did_puzzle_hash = did_full_puz.get_tree_hash()
announcement_set: Set[Announcement] = set()
@@ -1353,13 +1309,7 @@ async def generate_eve_spend(self, coin: Coin, full_puzzle: Program, innerpuz: P
p2_puzzle = uncurried[0]
# innerpuz solution is (mode p2_solution)
p2_solution = self.standard_wallet.make_solution(
- primaries=[
- {
- "puzzlehash": innerpuz.get_tree_hash(),
- "amount": uint64(coin.amount),
- "memos": [p2_puzzle.get_tree_hash()],
- }
- ]
+ primaries=[Payment(innerpuz.get_tree_hash(), uint64(coin.amount), [p2_puzzle.get_tree_hash()])]
)
innersol = Program.to([1, p2_solution])
# full solution is (lineage_proof my_amount inner_solution)
@@ -1510,9 +1460,3 @@ def deserialize_backup_data(backup_data: str) -> DIDInfo:
def require_derivation_paths(self) -> bool:
return True
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = DIDWallet()
diff --git a/chia/wallet/did_wallet/did_wallet_puzzles.py b/chia/wallet/did_wallet/did_wallet_puzzles.py
--- a/chia/wallet/did_wallet/did_wallet_puzzles.py
+++ b/chia/wallet/did_wallet/did_wallet_puzzles.py
@@ -12,16 +12,16 @@
from chia.util.ints import uint64
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.singleton import (
- LAUNCHER_PUZZLE_HASH,
+ SINGLETON_LAUNCHER_PUZZLE_HASH,
SINGLETON_TOP_LAYER_MOD,
SINGLETON_TOP_LAYER_MOD_HASH,
is_singleton,
)
from chia.wallet.util.curry_and_treehash import calculate_hash_of_quoted_mod_hash, curry_and_treehash
-DID_INNERPUZ_MOD = load_clvm_maybe_recompile("did_innerpuz.clvm")
+DID_INNERPUZ_MOD = load_clvm_maybe_recompile("did_innerpuz.clsp")
DID_INNERPUZ_MOD_HASH = DID_INNERPUZ_MOD.get_tree_hash()
-INTERMEDIATE_LAUNCHER_MOD = load_clvm_maybe_recompile("nft_intermediate_launcher.clvm")
+INTERMEDIATE_LAUNCHER_MOD = load_clvm_maybe_recompile("nft_intermediate_launcher.clsp")
def create_innerpuz(
@@ -45,7 +45,7 @@ def create_innerpuz(
backup_ids_hash = Program(Program.to(recovery_list)).get_tree_hash()
if recovery_list_hash is not None:
backup_ids_hash = recovery_list_hash
- singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, LAUNCHER_PUZZLE_HASH)))
+ singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
return DID_INNERPUZ_MOD.curry(p2_puzzle, backup_ids_hash, num_of_backup_ids_needed, singleton_struct, metadata)
@@ -67,7 +67,7 @@ def get_inner_puzhash_by_p2(
"""
backup_ids_hash = Program(Program.to(recovery_list)).get_tree_hash()
- singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, LAUNCHER_PUZZLE_HASH)))
+ singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
quoted_mod_hash = calculate_hash_of_quoted_mod_hash(DID_INNERPUZ_MOD_HASH)
diff --git a/chia/wallet/nft_wallet/metadata_outer_puzzle.py b/chia/wallet/nft_wallet/metadata_outer_puzzle.py
--- a/chia/wallet/nft_wallet/metadata_outer_puzzle.py
+++ b/chia/wallet/nft_wallet/metadata_outer_puzzle.py
@@ -11,7 +11,7 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.uncurried_puzzle import UncurriedPuzzle, uncurry_puzzle
-NFT_STATE_LAYER_MOD = load_clvm_maybe_recompile("nft_state_layer.clvm")
+NFT_STATE_LAYER_MOD = load_clvm_maybe_recompile("nft_state_layer.clsp")
NFT_STATE_LAYER_MOD_HASH = NFT_STATE_LAYER_MOD.get_tree_hash()
diff --git a/chia/wallet/nft_wallet/nft_info.py b/chia/wallet/nft_wallet/nft_info.py
--- a/chia/wallet/nft_wallet/nft_info.py
+++ b/chia/wallet/nft_wallet/nft_info.py
@@ -11,7 +11,7 @@
from chia.wallet.lineage_proof import LineageProof
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clvm")
+LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clsp")
IN_TRANSACTION_STATUS = "IN_TRANSACTION"
DEFAULT_STATUS = "DEFAULT"
@@ -29,6 +29,9 @@ class NFTInfo(Streamable):
nft_coin_id: bytes32
"""Current NFT coin ID"""
+ nft_coin_confirmation_height: uint32
+ """Current NFT coin confirmation height"""
+
owner_did: Optional[bytes32]
"""Owner DID"""
diff --git a/chia/wallet/nft_wallet/nft_puzzles.py b/chia/wallet/nft_wallet/nft_puzzles.py
--- a/chia/wallet/nft_wallet/nft_puzzles.py
+++ b/chia/wallet/nft_wallet/nft_puzzles.py
@@ -18,26 +18,26 @@
from chia.wallet.util.address_type import AddressType
log = logging.getLogger(__name__)
-SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clvm")
-LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clvm")
-NFT_STATE_LAYER_MOD = load_clvm_maybe_recompile("nft_state_layer.clvm")
+SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clsp")
+LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clsp")
+NFT_STATE_LAYER_MOD = load_clvm_maybe_recompile("nft_state_layer.clsp")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_TOP_LAYER_MOD.get_tree_hash()
NFT_STATE_LAYER_MOD_HASH = NFT_STATE_LAYER_MOD.get_tree_hash()
-NFT_METADATA_UPDATER = load_clvm_maybe_recompile("nft_metadata_updater_default.clvm")
-NFT_OWNERSHIP_LAYER = load_clvm_maybe_recompile("nft_ownership_layer.clvm")
+NFT_METADATA_UPDATER = load_clvm_maybe_recompile("nft_metadata_updater_default.clsp")
+NFT_OWNERSHIP_LAYER = load_clvm_maybe_recompile("nft_ownership_layer.clsp")
NFT_OWNERSHIP_LAYER_HASH = NFT_OWNERSHIP_LAYER.get_tree_hash()
NFT_TRANSFER_PROGRAM_DEFAULT = load_clvm_maybe_recompile(
- "nft_ownership_transfer_program_one_way_claim_with_royalties.clvm",
+ "nft_ownership_transfer_program_one_way_claim_with_royalties.clsp",
)
-STANDARD_PUZZLE_MOD = load_clvm_maybe_recompile("p2_delegated_puzzle_or_hidden_puzzle.clvm")
-INTERMEDIATE_LAUNCHER_MOD = load_clvm_maybe_recompile("nft_intermediate_launcher.clvm")
+STANDARD_PUZZLE_MOD = load_clvm_maybe_recompile("p2_delegated_puzzle_or_hidden_puzzle.clsp")
+INTERMEDIATE_LAUNCHER_MOD = load_clvm_maybe_recompile("nft_intermediate_launcher.clsp")
def create_nft_layer_puzzle_with_curry_params(
metadata: Program, metadata_updater_hash: bytes32, inner_puzzle: Program
) -> Program:
- """Curries params into nft_state_layer.clvm
+ """Curries params into nft_state_layer.clsp
Args to curry:
NFT_STATE_LAYER_MOD_HASH
@@ -109,6 +109,7 @@ async def get_nft_info_from_puzzle(
encode_puzzle_hash(uncurried_nft.singleton_launcher_id, prefix=AddressType.NFT.hrp(config=config)),
uncurried_nft.singleton_launcher_id,
nft_coin_info.coin.name(),
+ nft_coin_info.latest_height,
uncurried_nft.owner_did,
uncurried_nft.trade_price_percentage,
uncurried_nft.royalty_address,
diff --git a/chia/wallet/nft_wallet/nft_wallet.py b/chia/wallet/nft_wallet/nft_wallet.py
--- a/chia/wallet/nft_wallet/nft_wallet.py
+++ b/chia/wallet/nft_wallet/nft_wallet.py
@@ -5,7 +5,7 @@
import logging
import math
import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Type, TypeVar
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, Type, TypeVar, cast
from blspy import AugSchemeMPL, G1Element, G2Element
from clvm.casts import int_from_bytes, int_to_bytes
@@ -50,7 +50,8 @@
from chia.wallet.uncurried_puzzle import uncurry_puzzle
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend
+from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX, Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
@@ -60,6 +61,11 @@
class NFTWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol] = cast("NFTWallet", None)
+
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
@@ -170,7 +176,7 @@ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection) -
return
assert coin_states
parent_coin = coin_states[0].coin
- cs = await wallet_node.fetch_puzzle_solution(height, parent_coin, peer)
+ cs = await fetch_coin_spend(height, parent_coin, peer)
assert cs is not None
await self.puzzle_solution_received(cs, peer)
@@ -464,24 +470,23 @@ async def sign(self, spend_bundle: SpendBundle, puzzle_hashes: Optional[List[byt
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
synthetic_pk = synthetic_secret_key.get_g1()
pks[bytes(synthetic_pk)] = synthetic_secret_key
- error, conditions, cost = conditions_dict_for_solution(
+ conditions = conditions_dict_for_solution(
spend.puzzle_reveal.to_program(),
spend.solution.to_program(),
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
)
- if conditions is not None:
- for pk, msg in pkm_pairs_for_conditions_dict(
- conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
- ):
- try:
- sk = pks.get(pk)
- if sk:
- self.log.debug("Found key, signing for pk: %s", pk)
- sigs.append(AugSchemeMPL.sign(sk, msg))
- else:
- self.log.warning("Couldn't find key for: %s", pk)
- except AssertionError:
- raise ValueError("This spend bundle cannot be signed by the NFT wallet")
+ for pk, msg in pkm_pairs_for_conditions_dict(
+ conditions, spend.coin.name(), self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
+ ):
+ try:
+ sk = pks.get(pk)
+ if sk:
+ self.log.debug("Found key, signing for pk: %s", pk)
+ sigs.append(AugSchemeMPL.sign(sk, msg))
+ else:
+ self.log.warning("Couldn't find key for: %s", pk)
+ except AssertionError:
+ raise ValueError("This spend bundle cannot be signed by the NFT wallet")
agg_sig = AugSchemeMPL.aggregate(sigs)
return SpendBundle.aggregate([spend_bundle, SpendBundle([], agg_sig)])
@@ -610,32 +615,6 @@ async def create_from_puzzle_info(
name,
)
- async def create_tandem_xch_tx(
- self,
- fee: uint64,
- announcement_to_assert: Optional[Announcement] = None,
- reuse_puzhash: Optional[bool] = None,
- ) -> TransactionRecord:
- chia_coins = await self.standard_wallet.select_coins(fee)
- if reuse_puzhash is None:
- reuse_puzhash_config = self.wallet_state_manager.config.get("reuse_public_key_for_change", None)
- if reuse_puzhash_config is None:
- reuse_puzhash = False
- else:
- reuse_puzhash = reuse_puzhash_config.get(
- str(self.wallet_state_manager.wallet_node.logged_in_fingerprint), False
- )
- chia_tx = await self.standard_wallet.generate_signed_transaction(
- uint64(0),
- (await self.standard_wallet.get_puzzle_hash(not reuse_puzhash)),
- fee=fee,
- coins=chia_coins,
- coin_announcements_to_consume={announcement_to_assert} if announcement_to_assert is not None else None,
- reuse_puzhash=reuse_puzhash,
- )
- assert chia_tx.spend_bundle is not None
- return chia_tx
-
async def generate_signed_transaction(
self,
amounts: List[uint64],
@@ -745,13 +724,9 @@ async def generate_unsigned_spendbundle(
else:
puzzle_announcements_bytes = None
- primaries: List[AmountWithPuzzlehash] = []
- for payment in payments:
- primaries.append({"puzzlehash": payment.puzzle_hash, "amount": payment.amount, "memos": payment.memos})
-
if fee > 0:
announcement_to_make = nft_coin.coin.name()
- chia_tx = await self.create_tandem_xch_tx(
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(
fee, Announcement(nft_coin.coin.name(), announcement_to_make), reuse_puzhash=reuse_puzhash
)
else:
@@ -759,7 +734,7 @@ async def generate_unsigned_spendbundle(
chia_tx = None
innersol: Program = self.standard_wallet.make_solution(
- primaries=primaries,
+ primaries=payments,
coin_announcements=None if announcement_to_make is None else set((announcement_to_make,)),
coin_announcements_to_assert=coin_announcements_bytes,
puzzle_announcements_to_assert=puzzle_announcements_bytes,
@@ -975,15 +950,7 @@ async def make_nft1_offer(
tx = await wallet.generate_signed_transaction(
abs(amount),
DESIRED_OFFER_MOD_HASH,
- primaries=[
- AmountWithPuzzlehash(
- {
- "amount": uint64(payment_sum),
- "puzzlehash": DESIRED_OFFER_MOD_HASH,
- "memos": [],
- }
- )
- ]
+ primaries=[Payment(DESIRED_OFFER_MOD_HASH, uint64(payment_sum))]
if payment_sum > 0 or old
else [],
fee=fee,
@@ -1055,7 +1022,6 @@ async def make_nft1_offer(
Payment(
DESIRED_OFFER_MOD_HASH,
uint64(sum(p.amount for _, p in duplicate_payments)),
- [],
).as_condition_args()
],
)
@@ -1333,11 +1299,7 @@ async def mint_from_did(
new_p2_puzhash = p2_puzzle.get_tree_hash()
assert new_p2_puzhash is not None
# make the primaries for the DID spend
- primaries = [
- AmountWithPuzzlehash(
- {"puzzlehash": new_innerpuzhash, "amount": uint64(did_coin.amount), "memos": [bytes(new_p2_puzhash)]}
- )
- ]
+ primaries = [Payment(new_innerpuzhash, uint64(did_coin.amount), [bytes(new_p2_puzhash)])]
# Ensure we have an xch coin of high enough amount
assert isinstance(fee, uint64)
@@ -1371,18 +1333,10 @@ async def mint_from_did(
for mint_number in range(mint_number_start, mint_number_end):
# Create the puzzle, solution and coin spend for the intermediate launcher
intermediate_launcher_puz = did_wallet_puzzles.INTERMEDIATE_LAUNCHER_MOD.curry(
- chia.wallet.singleton.LAUNCHER_PUZZLE_HASH, mint_number, mint_total
+ chia.wallet.singleton.SINGLETON_LAUNCHER_PUZZLE_HASH, mint_number, mint_total
)
intermediate_launcher_ph = intermediate_launcher_puz.get_tree_hash()
- primaries.append(
- AmountWithPuzzlehash(
- {
- "puzzlehash": intermediate_launcher_ph,
- "amount": uint64(0),
- "memos": [intermediate_launcher_ph],
- }
- )
- )
+ primaries.append(Payment(intermediate_launcher_ph, uint64(0), [intermediate_launcher_ph]))
intermediate_launcher_sol = Program.to([])
intermediate_launcher_coin = Coin(did_coin.name(), intermediate_launcher_ph, uint64(0))
intermediate_launcher_coin_spend = CoinSpend(
@@ -1397,7 +1351,9 @@ async def mint_from_did(
did_announcements.add(std_hash(intermediate_launcher_coin.name() + intermediate_announcement_message))
# Create the launcher coin, and add its id to a list to be asserted in the DID spend
- launcher_coin = Coin(intermediate_launcher_coin.name(), chia.wallet.singleton.LAUNCHER_PUZZLE_HASH, amount)
+ launcher_coin = Coin(
+ intermediate_launcher_coin.name(), chia.wallet.singleton.SINGLETON_LAUNCHER_PUZZLE_HASH, amount
+ )
launcher_ids.append(launcher_coin.name())
# Grab the metadata from metadata_list. The index for metadata_list
@@ -1422,7 +1378,9 @@ async def mint_from_did(
genesis_launcher_solution = Program.to([eve_fullpuz.get_tree_hash(), amount, []])
- launcher_cs = CoinSpend(launcher_coin, chia.wallet.singleton.LAUNCHER_PUZZLE, genesis_launcher_solution)
+ launcher_cs = CoinSpend(
+ launcher_coin, chia.wallet.singleton.SINGLETON_LAUNCHER_PUZZLE, genesis_launcher_solution
+ )
launcher_spends.append(launcher_cs)
eve_coin = Coin(launcher_coin.name(), eve_fullpuz.get_tree_hash(), uint64(amount))
@@ -1474,18 +1432,14 @@ async def mint_from_did(
xch_spends = []
if xch_change_ph is None:
xch_change_ph = await self.standard_wallet.get_new_puzzlehash()
- xch_primaries = [
- AmountWithPuzzlehash({"puzzlehash": xch_change_ph, "amount": change, "memos": [xch_change_ph]})
- ]
+ xch_payment = Payment(xch_change_ph, change, [xch_change_ph])
first = True
for xch_coin in xch_coins:
puzzle: Program = await self.standard_wallet.puzzle_for_puzzle_hash(xch_coin.puzzle_hash)
if first:
message_list: List[bytes32] = [c.name() for c in xch_coins]
- message_list.append(
- Coin(xch_coin.name(), xch_primaries[0]["puzzlehash"], xch_primaries[0]["amount"]).name()
- )
+ message_list.append(Coin(xch_coin.name(), xch_payment.puzzle_hash, xch_payment.amount).name())
message: bytes32 = std_hash(b"".join(message_list))
if len(xch_coins) > 1:
@@ -1494,7 +1448,7 @@ async def mint_from_did(
xch_announcement = None
solution: Program = self.standard_wallet.make_solution(
- primaries=xch_primaries,
+ primaries=[xch_payment],
fee=fee,
coin_announcements=xch_announcement,
coin_announcements_to_assert={Announcement(did_coin.name(), message).name()},
@@ -1518,7 +1472,7 @@ async def mint_from_did(
puzzle_announcements_to_assert=puzzle_assertions,
)
did_inner_sol: Program = Program.to([1, did_p2_solution])
- did_full_puzzle: Program = chia.wallet.singleton.create_fullpuz(
+ did_full_puzzle: Program = chia.wallet.singleton.create_singleton_puzzle(
innerpuz,
did_wallet.did_info.origin_coin.name(),
)
@@ -1624,15 +1578,7 @@ async def mint_from_xch(
nft_puzzles.LAUNCHER_PUZZLE_HASH, mint_number, mint_total
)
intermediate_launcher_ph = intermediate_launcher_puz.get_tree_hash()
- primaries.append(
- AmountWithPuzzlehash(
- {
- "puzzlehash": intermediate_launcher_ph,
- "amount": uint64(1),
- "memos": [intermediate_launcher_ph],
- }
- )
- )
+ primaries.append(Payment(intermediate_launcher_ph, uint64(1), [intermediate_launcher_ph]))
intermediate_launcher_sol = Program.to([])
intermediate_launcher_coin = Coin(funding_coin.name(), intermediate_launcher_ph, uint64(1))
intermediate_launcher_coin_spend = CoinSpend(
@@ -1723,18 +1669,14 @@ async def mint_from_xch(
xch_spends = []
if xch_change_ph is None:
xch_change_ph = await self.standard_wallet.get_new_puzzlehash()
- xch_primaries = [
- AmountWithPuzzlehash({"puzzlehash": xch_change_ph, "amount": change, "memos": [xch_change_ph]})
- ]
+ xch_payment = Payment(xch_change_ph, change, [xch_change_ph])
first = True
for xch_coin in xch_coins:
puzzle: Program = await self.standard_wallet.puzzle_for_puzzle_hash(xch_coin.puzzle_hash)
if first:
message_list: List[bytes32] = [c.name() for c in xch_coins]
- message_list.append(
- Coin(xch_coin.name(), xch_primaries[0]["puzzlehash"], xch_primaries[0]["amount"]).name()
- )
+ message_list.append(Coin(xch_coin.name(), xch_payment.puzzle_hash, xch_payment.amount).name())
message: bytes32 = std_hash(b"".join(message_list))
if len(xch_coins) > 1:
@@ -1743,7 +1685,7 @@ async def mint_from_xch(
xch_announcement = None
solution: Program = self.standard_wallet.make_solution(
- primaries=xch_primaries + primaries,
+ primaries=[xch_payment] + primaries,
fee=fee,
coin_announcements=xch_announcement if len(xch_coins) > 1 else None,
coin_announcements_to_assert=coin_announcements,
@@ -1785,9 +1727,3 @@ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
def get_name(self) -> str:
return self.wallet_info.name
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = NFTWallet()
diff --git a/chia/wallet/nft_wallet/ownership_outer_puzzle.py b/chia/wallet/nft_wallet/ownership_outer_puzzle.py
--- a/chia/wallet/nft_wallet/ownership_outer_puzzle.py
+++ b/chia/wallet/nft_wallet/ownership_outer_puzzle.py
@@ -11,7 +11,7 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.uncurried_puzzle import UncurriedPuzzle, uncurry_puzzle
-OWNERSHIP_LAYER_MOD = load_clvm_maybe_recompile("nft_ownership_layer.clvm")
+OWNERSHIP_LAYER_MOD = load_clvm_maybe_recompile("nft_ownership_layer.clsp")
def match_ownership_layer_puzzle(puzzle: UncurriedPuzzle) -> Tuple[bool, List[Program]]:
diff --git a/chia/wallet/nft_wallet/transfer_program_puzzle.py b/chia/wallet/nft_wallet/transfer_program_puzzle.py
--- a/chia/wallet/nft_wallet/transfer_program_puzzle.py
+++ b/chia/wallet/nft_wallet/transfer_program_puzzle.py
@@ -11,7 +11,7 @@
from chia.wallet.puzzles.singleton_top_layer_v1_1 import SINGLETON_LAUNCHER_HASH, SINGLETON_MOD_HASH
from chia.wallet.uncurried_puzzle import UncurriedPuzzle
-TRANSFER_PROGRAM_MOD = load_clvm_maybe_recompile("nft_ownership_transfer_program_one_way_claim_with_royalties.clvm")
+TRANSFER_PROGRAM_MOD = load_clvm_maybe_recompile("nft_ownership_transfer_program_one_way_claim_with_royalties.clsp")
def match_transfer_program_puzzle(puzzle: UncurriedPuzzle) -> Tuple[bool, List[Program]]:
diff --git a/chia/wallet/nft_wallet/uncurry_nft.py b/chia/wallet/nft_wallet/uncurry_nft.py
--- a/chia/wallet/nft_wallet/uncurry_nft.py
+++ b/chia/wallet/nft_wallet/uncurry_nft.py
@@ -10,9 +10,9 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
log = logging.getLogger(__name__)
-SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clvm")
-NFT_MOD = load_clvm_maybe_recompile("nft_state_layer.clvm")
-NFT_OWNERSHIP_LAYER = load_clvm_maybe_recompile("nft_ownership_layer.clvm")
+SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clsp")
+NFT_MOD = load_clvm_maybe_recompile("nft_state_layer.clsp")
+NFT_OWNERSHIP_LAYER = load_clvm_maybe_recompile("nft_ownership_layer.clsp")
_T_UncurriedNFT = TypeVar("_T_UncurriedNFT", bound="UncurriedNFT")
diff --git a/chia/wallet/notification_manager.py b/chia/wallet/notification_manager.py
--- a/chia/wallet/notification_manager.py
+++ b/chia/wallet/notification_manager.py
@@ -2,7 +2,7 @@
import dataclasses
import logging
-from typing import Any, Dict, List, Optional, Set, Tuple
+from typing import Any, Dict, List, Optional, Set
from blspy import G2Element
@@ -56,14 +56,14 @@ async def potentially_add_new_notification(self, coin_state: CoinState, parent_s
else:
memos: Dict[bytes32, List[bytes]] = compute_memos_for_spend(parent_spend)
coin_memos: List[bytes] = memos.get(coin_name, [])
- if len(coin_memos) == 0:
+ if len(coin_memos) == 0 or len(coin_memos[0]) != 32:
return False
- wallet_info: Optional[
- Tuple[uint32, WalletType]
- ] = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(bytes32(coin_memos[0]))
+ wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_puzzle_hash(
+ bytes32(coin_memos[0])
+ )
if (
- wallet_info is not None
- and wallet_info[1] == WalletType.STANDARD_WALLET
+ wallet_identifier is not None
+ and wallet_identifier.type == WalletType.STANDARD_WALLET
and len(coin_memos) == 2
and construct_notification(bytes32(coin_memos[0]), uint64(coin_state.coin.amount)).get_tree_hash()
== coin_state.coin.puzzle_hash
diff --git a/chia/wallet/payment.py b/chia/wallet/payment.py
--- a/chia/wallet/payment.py
+++ b/chia/wallet/payment.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import List
from chia.types.blockchain_format.program import Program
@@ -13,7 +13,7 @@
class Payment:
puzzle_hash: bytes32
amount: uint64
- memos: List[bytes]
+ memos: List[bytes] = field(default_factory=list)
def as_condition_args(self) -> List:
return [self.puzzle_hash, self.amount, self.memos]
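Since `memos` now defaults to an empty list, a Payment can be built without spelling the memos out. A minimal illustrative sketch (the puzzle hash below is a made-up placeholder, not a real address):

from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint64
from chia.wallet.payment import Payment

placeholder_ph = bytes32(b"\x00" * 32)  # hypothetical puzzle hash, illustration only
no_memo_payment = Payment(placeholder_ph, uint64(1000))          # memos defaults to []
memo_payment = Payment(placeholder_ph, uint64(1000), [b"note"])  # explicit memos still accepted
assert no_memo_payment.as_condition_args() == [placeholder_ph, uint64(1000), []]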
diff --git a/chia/wallet/puzzles/cat_loader.py b/chia/wallet/puzzles/cat_loader.py
--- a/chia/wallet/puzzles/cat_loader.py
+++ b/chia/wallet/puzzles/cat_loader.py
@@ -2,7 +2,6 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-CAT_MOD = load_clvm_maybe_recompile("cat_v2.clvm", package_or_requirement=__name__)
-LOCK_INNER_PUZZLE = load_clvm_maybe_recompile("lock.inner.puzzle.clvm", package_or_requirement=__name__)
+CAT_MOD = load_clvm_maybe_recompile("cat_v2.clsp", package_or_requirement=__name__)
CAT_MOD_HASH = CAT_MOD.get_tree_hash()
diff --git a/chia/wallet/puzzles/load_clvm.py b/chia/wallet/puzzles/load_clvm.py
--- a/chia/wallet/puzzles/load_clvm.py
+++ b/chia/wallet/puzzles/load_clvm.py
@@ -17,7 +17,9 @@
compile_clvm_py = None
-recompile_requested = (os.environ.get("CHIA_DEV_COMPILE_CLVM_ON_IMPORT", "") != "") or ("pytest" in sys.modules)
+recompile_requested = (
+ (os.environ.get("CHIA_DEV_COMPILE_CLVM_ON_IMPORT", "") != "") or ("pytest" in sys.modules)
+) and os.environ.get("CHIA_DEV_COMPILE_CLVM_DISABLED", None) is None
def translate_path(p_):
@@ -82,8 +84,8 @@ def load_serialized_clvm(
clvm_filename, package_or_requirement=__name__, include_standard_libraries: bool = False, recompile: bool = True
) -> SerializedProgram:
"""
- This function takes a .clvm file in the given package and compiles it to a
- .clvm.hex file if the .hex file is missing or older than the .clvm file, then
+ This function takes a .clsp file in the given package and compiles it to a
+ .clsp.hex file if the .hex file is missing or older than the .clsp file, then
returns the contents of the .hex file as a `Program`.
clvm_filename: file name
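The practical effect of the widened guard is that recompile-on-import can now be switched off even when pytest is loaded. A standalone sketch of the same boolean logic, kept separate from the module itself:

def recompile_requested_for(env: dict, pytest_loaded: bool) -> bool:
    # Mirrors the expression above: recompilation is requested when the opt-in variable
    # is set or pytest is loaded, unless the new disable variable is present.
    wants_recompile = (env.get("CHIA_DEV_COMPILE_CLVM_ON_IMPORT", "") != "") or pytest_loaded
    return wants_recompile and env.get("CHIA_DEV_COMPILE_CLVM_DISABLED", None) is None

assert recompile_requested_for({}, pytest_loaded=True) is True
assert recompile_requested_for({"CHIA_DEV_COMPILE_CLVM_DISABLED": "1"}, pytest_loaded=True) is False
assert recompile_requested_for({}, pytest_loaded=False) is False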
diff --git a/chia/wallet/puzzles/p2_conditions.py b/chia/wallet/puzzles/p2_conditions.py
--- a/chia/wallet/puzzles/p2_conditions.py
+++ b/chia/wallet/puzzles/p2_conditions.py
@@ -16,7 +16,7 @@
from .load_clvm import load_clvm_maybe_recompile
-MOD = load_clvm_maybe_recompile("p2_conditions.clvm")
+MOD = load_clvm_maybe_recompile("p2_conditions.clsp")
def puzzle_for_conditions(conditions) -> Program:
diff --git a/chia/wallet/puzzles/p2_delegated_conditions.py b/chia/wallet/puzzles/p2_delegated_conditions.py
--- a/chia/wallet/puzzles/p2_delegated_conditions.py
+++ b/chia/wallet/puzzles/p2_delegated_conditions.py
@@ -12,7 +12,7 @@
from .load_clvm import load_clvm_maybe_recompile
-MOD = load_clvm_maybe_recompile("p2_delegated_conditions.clvm")
+MOD = load_clvm_maybe_recompile("p2_delegated_conditions.clsp")
def puzzle_for_pk(public_key: Program) -> Program:
diff --git a/chia/wallet/puzzles/p2_delegated_puzzle.py b/chia/wallet/puzzles/p2_delegated_puzzle.py
--- a/chia/wallet/puzzles/p2_delegated_puzzle.py
+++ b/chia/wallet/puzzles/p2_delegated_puzzle.py
@@ -19,7 +19,7 @@
from . import p2_conditions
from .load_clvm import load_clvm_maybe_recompile
-MOD = load_clvm_maybe_recompile("p2_delegated_puzzle.clvm")
+MOD = load_clvm_maybe_recompile("p2_delegated_puzzle.clsp")
def puzzle_for_pk(public_key: bytes) -> Program:
diff --git a/chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py b/chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py
--- a/chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py
+++ b/chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py
@@ -74,7 +74,7 @@
DEFAULT_HIDDEN_PUZZLE_HASH = DEFAULT_HIDDEN_PUZZLE.get_tree_hash() # this puzzle `(x)` always fails
-MOD = load_clvm_maybe_recompile("p2_delegated_puzzle_or_hidden_puzzle.clvm")
+MOD = load_clvm_maybe_recompile("p2_delegated_puzzle_or_hidden_puzzle.clsp")
QUOTED_MOD_HASH = calculate_hash_of_quoted_mod_hash(MOD.get_tree_hash())
diff --git a/chia/wallet/puzzles/p2_m_of_n_delegate_direct.py b/chia/wallet/puzzles/p2_m_of_n_delegate_direct.py
--- a/chia/wallet/puzzles/p2_m_of_n_delegate_direct.py
+++ b/chia/wallet/puzzles/p2_m_of_n_delegate_direct.py
@@ -11,7 +11,7 @@
from .load_clvm import load_clvm_maybe_recompile
-MOD = load_clvm_maybe_recompile("p2_m_of_n_delegate_direct.clvm")
+MOD = load_clvm_maybe_recompile("p2_m_of_n_delegate_direct.clsp")
def puzzle_for_m_of_public_key_list(m, public_key_list) -> Program:
diff --git a/chia/wallet/puzzles/p2_puzzle_hash.py b/chia/wallet/puzzles/p2_puzzle_hash.py
--- a/chia/wallet/puzzles/p2_puzzle_hash.py
+++ b/chia/wallet/puzzles/p2_puzzle_hash.py
@@ -12,7 +12,7 @@
from .load_clvm import load_clvm_maybe_recompile
-MOD = load_clvm_maybe_recompile("p2_puzzle_hash.clvm")
+MOD = load_clvm_maybe_recompile("p2_puzzle_hash.clsp")
def puzzle_for_inner_puzzle_hash(inner_puzzle_hash: bytes32) -> Program:
diff --git a/chia/wallet/puzzles/prefarm/make_prefarm_ph.py b/chia/wallet/puzzles/prefarm/make_prefarm_ph.py
--- a/chia/wallet/puzzles/prefarm/make_prefarm_ph.py
+++ b/chia/wallet/puzzles/prefarm/make_prefarm_ph.py
@@ -40,20 +40,14 @@ def make_puzzle(amount: int) -> int:
print(f"Address: {encode_puzzle_hash(puzzle_hash, prefix)}")
result = puzzle_prog.run(solution)
- error, result_human = parse_sexp_to_conditions(result)
-
total_chia = 0
- if error:
- print(f"Error: {error}")
- else:
- assert result_human is not None
- for cvp in result_human:
- assert len(cvp.vars) == 2
- total_chia += int_from_bytes(cvp.vars[1])
- print(
- f"{ConditionOpcode(cvp.opcode).name}: {encode_puzzle_hash(bytes32(cvp.vars[0]), prefix)},"
- f" amount: {int_from_bytes(cvp.vars[1])}"
- )
+ for cvp in parse_sexp_to_conditions(result):
+ assert len(cvp.vars) == 2
+ total_chia += int_from_bytes(cvp.vars[1])
+ print(
+ f"{ConditionOpcode(cvp.opcode).name}: {encode_puzzle_hash(bytes32(cvp.vars[0]), prefix)},"
+ f" amount: {int_from_bytes(cvp.vars[1])}"
+ )
return total_chia
diff --git a/chia/wallet/puzzles/prefarm/spend_prefarm.py b/chia/wallet/puzzles/prefarm/spend_prefarm.py
--- a/chia/wallet/puzzles/prefarm/spend_prefarm.py
+++ b/chia/wallet/puzzles/prefarm/spend_prefarm.py
@@ -22,10 +22,7 @@ def print_conditions(spend_bundle: SpendBundle):
print("\nConditions:")
for coin_spend in spend_bundle.coin_spends:
result = Program.from_bytes(bytes(coin_spend.puzzle_reveal)).run(Program.from_bytes(bytes(coin_spend.solution)))
- error, result_human = parse_sexp_to_conditions(result)
- assert error is None
- assert result_human is not None
- for cvp in result_human:
+ for cvp in parse_sexp_to_conditions(result):
print(f"{ConditionOpcode(cvp.opcode).name}: {[var.hex() for var in cvp.vars]}")
print("")
diff --git a/chia/wallet/puzzles/puzzle_utils.py b/chia/wallet/puzzles/puzzle_utils.py
--- a/chia/wallet/puzzles/puzzle_utils.py
+++ b/chia/wallet/puzzles/puzzle_utils.py
@@ -1,14 +1,15 @@
from __future__ import annotations
-from typing import List, Optional
+from typing import List
from chia.types.condition_opcodes import ConditionOpcode
-def make_create_coin_condition(puzzle_hash, amount, memos: Optional[List[bytes]]) -> List:
- if memos is not None:
- return [ConditionOpcode.CREATE_COIN, puzzle_hash, amount, memos]
- return [ConditionOpcode.CREATE_COIN, puzzle_hash, amount]
+def make_create_coin_condition(puzzle_hash, amount, memos: List[bytes]) -> List:
+ condition = [ConditionOpcode.CREATE_COIN, puzzle_hash, amount]
+ if len(memos) > 0:
+ condition.append(memos)
+ return condition
def make_assert_aggsig_condition(pubkey):
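With the reworked helper, the memo list is appended only when it is non-empty; a small sketch of the expected condition shapes (the 32-byte puzzle hash is a placeholder):

from chia.types.condition_opcodes import ConditionOpcode
from chia.wallet.puzzles.puzzle_utils import make_create_coin_condition

ph = b"\x11" * 32  # hypothetical puzzle hash bytes
assert make_create_coin_condition(ph, 1000, []) == [ConditionOpcode.CREATE_COIN, ph, 1000]
assert make_create_coin_condition(ph, 1000, [b"memo"]) == [ConditionOpcode.CREATE_COIN, ph, 1000, [b"memo"]]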
diff --git a/chia/wallet/puzzles/rom_bootstrap_generator.py b/chia/wallet/puzzles/rom_bootstrap_generator.py
--- a/chia/wallet/puzzles/rom_bootstrap_generator.py
+++ b/chia/wallet/puzzles/rom_bootstrap_generator.py
@@ -4,8 +4,5 @@
from .load_clvm import load_serialized_clvm_maybe_recompile
-MOD = load_serialized_clvm_maybe_recompile("rom_bootstrap_generator.clvm")
-
-
-def get_generator() -> SerializedProgram:
- return MOD
+GENERATOR_MOD: SerializedProgram = load_serialized_clvm_maybe_recompile("rom_bootstrap_generator.clsp")
+GENERATOR2_MOD: SerializedProgram = load_serialized_clvm_maybe_recompile("rom_bootstrap_generator2.clsp")
diff --git a/chia/wallet/puzzles/singleton_top_layer.py b/chia/wallet/puzzles/singleton_top_layer.py
--- a/chia/wallet/puzzles/singleton_top_layer.py
+++ b/chia/wallet/puzzles/singleton_top_layer.py
@@ -12,11 +12,11 @@
from chia.wallet.lineage_proof import LineageProof
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer.clvm")
+SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer.clsp")
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
-P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clvm")
-P2_SINGLETON_OR_DELAYED_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clvm")
-SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clvm")
+P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clsp")
+P2_SINGLETON_OR_DELAYED_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clsp")
+SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clsp")
SINGLETON_LAUNCHER_HASH = SINGLETON_LAUNCHER.get_tree_hash()
ESCAPE_VALUE = -113
MELT_CONDITION = [ConditionOpcode.CREATE_COIN, 0, ESCAPE_VALUE]
diff --git a/chia/wallet/puzzles/singleton_top_layer_v1_1.py b/chia/wallet/puzzles/singleton_top_layer_v1_1.py
--- a/chia/wallet/puzzles/singleton_top_layer_v1_1.py
+++ b/chia/wallet/puzzles/singleton_top_layer_v1_1.py
@@ -13,11 +13,11 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.uncurried_puzzle import UncurriedPuzzle
-SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clvm")
+SINGLETON_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clsp")
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
-P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clvm")
-P2_SINGLETON_OR_DELAYED_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clvm")
-SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clvm")
+P2_SINGLETON_MOD = load_clvm_maybe_recompile("p2_singleton.clsp")
+P2_SINGLETON_OR_DELAYED_MOD = load_clvm_maybe_recompile("p2_singleton_or_delayed_puzhash.clsp")
+SINGLETON_LAUNCHER = load_clvm_maybe_recompile("singleton_launcher.clsp")
SINGLETON_LAUNCHER_HASH = SINGLETON_LAUNCHER.get_tree_hash()
ESCAPE_VALUE = -113
MELT_CONDITION = [ConditionOpcode.CREATE_COIN, 0, ESCAPE_VALUE]
diff --git a/chia/wallet/puzzles/tails.py b/chia/wallet/puzzles/tails.py
--- a/chia/wallet/puzzles/tails.py
+++ b/chia/wallet/puzzles/tails.py
@@ -15,14 +15,15 @@
)
from chia.wallet.cat_wallet.lineage_store import CATLineageStore
from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.payment import Payment
from chia.wallet.puzzles.cat_loader import CAT_MOD
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.transaction_record import TransactionRecord
-GENESIS_BY_ID_MOD = load_clvm_maybe_recompile("genesis_by_coin_id.clvm")
-GENESIS_BY_PUZHASH_MOD = load_clvm_maybe_recompile("genesis_by_puzzle_hash.clvm")
-EVERYTHING_WITH_SIG_MOD = load_clvm_maybe_recompile("everything_with_signature.clvm")
-DELEGATED_LIMITATIONS_MOD = load_clvm_maybe_recompile("delegated_tail.clvm")
+GENESIS_BY_ID_MOD = load_clvm_maybe_recompile("genesis_by_coin_id.clsp")
+GENESIS_BY_PUZHASH_MOD = load_clvm_maybe_recompile("genesis_by_puzzle_hash.clsp")
+EVERYTHING_WITH_SIG_MOD = load_clvm_maybe_recompile("everything_with_signature.clsp")
+DELEGATED_LIMITATIONS_MOD = load_clvm_maybe_recompile("delegated_tail.clsp")
class LimitationsProgram:
@@ -91,9 +92,7 @@ async def generate_issuance_bundle(cls, wallet, _: Dict, amount: uint64) -> Tupl
inner_solution = wallet.standard_wallet.add_condition_to_solution(
Program.to([51, 0, -113, tail, []]),
- wallet.standard_wallet.make_solution(
- primaries=[{"puzzlehash": cat_inner.get_tree_hash(), "amount": amount}],
- ),
+ wallet.standard_wallet.make_solution(primaries=[Payment(cat_inner.get_tree_hash(), amount)]),
)
eve_spend = unsigned_spend_bundle_for_spendable_cats(
CAT_MOD,
diff --git a/chia/wallet/settings/default_settings.py b/chia/wallet/settings/default_settings.py
deleted file mode 100644
--- a/chia/wallet/settings/default_settings.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from __future__ import annotations
-
-from chia.wallet.settings.settings_objects import BackupInitialized
-
-default_backup_initialized = BackupInitialized(False, False, False, True)
-
-default_settings = {BackupInitialized.__name__: default_backup_initialized}
diff --git a/chia/wallet/settings/settings_objects.py b/chia/wallet/settings/settings_objects.py
deleted file mode 100644
--- a/chia/wallet/settings/settings_objects.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import dataclass
-
-from chia.util.streamable import Streamable, streamable
-
-
-@streamable
-@dataclass(frozen=True)
-class BackupInitialized(Streamable):
- """
- Stores user decision regarding import of backup info
- """
-
- user_initialized: bool # Stores if user made a selection in UI. (Skip vs Import backup)
- user_skipped: bool # Stores if user decided to skip import of backup info
- backup_info_imported: bool # Stores if backup info has been imported
- new_wallet: bool # Stores if this wallet is newly created / not restored from backup
diff --git a/chia/wallet/settings/user_settings.py b/chia/wallet/settings/user_settings.py
deleted file mode 100644
--- a/chia/wallet/settings/user_settings.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Dict
-
-from chia.wallet.key_val_store import KeyValStore
-from chia.wallet.settings.default_settings import default_settings
-from chia.wallet.settings.settings_objects import BackupInitialized
-
-
-class UserSettings:
- settings: Dict[str, Any]
- basic_store: KeyValStore
-
- @staticmethod
- async def create(
- store: KeyValStore,
- name: str = None,
- ):
- self = UserSettings()
- self.basic_store = store
- self.settings = {}
- await self.load_store()
- return self
-
- def _keys(self):
- all_keys = [BackupInitialized]
- return all_keys
-
- async def load_store(self):
- keys = self._keys()
- for setting in keys:
- name = setting.__name__
- object = await self.basic_store.get_object(name, BackupInitialized)
- if object is None:
- object = default_settings[name]
-
- assert object is not None
- self.settings[name] = object
-
- async def setting_updated(self, setting: Any):
- name = setting.__class__.__name__
- await self.basic_store.set_object(name, setting)
- self.settings[name] = setting
diff --git a/chia/wallet/sign_coin_spends.py b/chia/wallet/sign_coin_spends.py
--- a/chia/wallet/sign_coin_spends.py
+++ b/chia/wallet/sign_coin_spends.py
@@ -39,13 +39,7 @@ async def sign_coin_spends(
msg_list: List[bytes] = []
for coin_spend in coin_spends:
# Get AGG_SIG conditions
- err, conditions_dict, cost = conditions_dict_for_solution(
- coin_spend.puzzle_reveal, coin_spend.solution, max_cost
- )
- if err or conditions_dict is None:
- error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
- raise ValueError(error_msg)
-
+ conditions_dict = conditions_dict_for_solution(coin_spend.puzzle_reveal, coin_spend.solution, max_cost)
# Create signature
for pk_bytes, msg in pkm_pairs_for_conditions_dict(conditions_dict, coin_spend.coin.name(), additional_data):
pk = blspy.G1Element.from_bytes(pk_bytes)
diff --git a/chia/wallet/singleton.py b/chia/wallet/singleton.py
--- a/chia/wallet/singleton.py
+++ b/chia/wallet/singleton.py
@@ -7,14 +7,14 @@
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.util.curry_and_treehash import calculate_hash_of_quoted_mod_hash, curry_and_treehash
-SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clvm")
+SINGLETON_TOP_LAYER_MOD = load_clvm_maybe_recompile("singleton_top_layer_v1_1.clsp")
SINGLETON_TOP_LAYER_MOD_HASH = SINGLETON_TOP_LAYER_MOD.get_tree_hash()
SINGLETON_TOP_LAYER_MOD_HASH_QUOTED = calculate_hash_of_quoted_mod_hash(SINGLETON_TOP_LAYER_MOD_HASH)
-LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clvm")
-LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
+SINGLETON_LAUNCHER_PUZZLE = load_clvm_maybe_recompile("singleton_launcher.clsp")
+SINGLETON_LAUNCHER_PUZZLE_HASH = SINGLETON_LAUNCHER_PUZZLE.get_tree_hash()
-def get_innerpuzzle_from_puzzle(puzzle: Program) -> Optional[Program]:
+def get_inner_puzzle_from_singleton(puzzle: Program) -> Optional[Program]:
"""
Extract the inner puzzle of a singleton
:param puzzle: Singleton puzzle
@@ -39,7 +39,7 @@ def is_singleton(inner_f: Program) -> bool:
return inner_f == SINGLETON_TOP_LAYER_MOD
-def create_fullpuz_hash(innerpuz_hash: bytes32, launcher_id: bytes32) -> bytes32:
+def create_singleton_puzzle_hash(innerpuz_hash: bytes32, launcher_id: bytes32) -> bytes32:
"""
Return Hash ID of the whole Singleton Puzzle
:param innerpuz_hash: Singleton inner puzzle tree hash
@@ -47,12 +47,12 @@ def create_fullpuz_hash(innerpuz_hash: bytes32, launcher_id: bytes32) -> bytes32
:return: Singleton full puzzle hash
"""
# singleton_struct = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH))
- singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, LAUNCHER_PUZZLE_HASH)))
+ singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
return curry_and_treehash(SINGLETON_TOP_LAYER_MOD_HASH_QUOTED, singleton_struct.get_tree_hash(), innerpuz_hash)
-def create_fullpuz(innerpuz: Program, launcher_id: bytes32) -> Program:
+def create_singleton_puzzle(innerpuz: Program, launcher_id: bytes32) -> Program:
"""
Create a full Singleton puzzle
:param innerpuz: Singleton inner puzzle
@@ -60,5 +60,5 @@ def create_fullpuz(innerpuz: Program, launcher_id: bytes32) -> Program:
:return: Singleton full puzzle
"""
# singleton_struct = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH))
- singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, LAUNCHER_PUZZLE_HASH)))
+ singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
return SINGLETON_TOP_LAYER_MOD.curry(singleton_struct, innerpuz)
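The renamed constructors are expected to keep the usual relationship between the full puzzle and its precomputed hash; a hedged sketch of that invariant, assuming an inner puzzle and launcher id are already available:

from chia.wallet.singleton import create_singleton_puzzle, create_singleton_puzzle_hash

def check_singleton_hash(inner_puzzle, launcher_id) -> bool:
    # The curried full puzzle and the hash-only helper should agree on the tree hash.
    full_puzzle = create_singleton_puzzle(inner_puzzle, launcher_id)
    return full_puzzle.get_tree_hash() == create_singleton_puzzle_hash(inner_puzzle.get_tree_hash(), launcher_id)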
diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py
--- a/chia/wallet/trade_manager.py
+++ b/chia/wallet/trade_manager.py
@@ -28,17 +28,18 @@
from chia.wallet.trading.trade_status import TradeStatus
from chia.wallet.trading.trade_store import TradeStore
from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.query_filter import HashFilter
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_coin_record import WalletCoinRecord
-OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clvm")
+OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clsp")
class TradeManager:
"""
- This class is a driver for creating and accepting settlement_payments.clvm style offers.
+ This class is a driver for creating and accepting settlement_payments.clsp style offers.
By default, standard XCH is supported but to support other types of assets you must implement certain functions on
the asset's wallet as well as create a driver for its puzzle(s). Here is a guide to integrating new types of
@@ -138,8 +139,10 @@ async def coins_of_interest_farmed(
offer = Offer.from_bytes(trade.offer)
primary_coin_ids = [c.name() for c in offer.removals()]
# TODO: Add `WalletCoinStore.get_coins`.
- our_coin_records = await self.wallet_state_manager.coin_store.get_coin_records(primary_coin_ids)
- our_primary_coins: List[Coin] = [cr.coin for cr in our_coin_records.values()]
+ result = await self.wallet_state_manager.coin_store.get_coin_records(
+ coin_id_filter=HashFilter.include(primary_coin_ids)
+ )
+ our_primary_coins: List[Coin] = [cr.coin for cr in result.records]
our_additions: List[Coin] = list(
filter(lambda c: offer.get_root_removal(c) in our_primary_coins, offer.additions())
)
@@ -194,7 +197,11 @@ async def get_locked_coins(self) -> Dict[bytes32, WalletCoinRecord]:
# - The cast here is required for now because TradeManager.wallet_state_manager is hinted as Any.
return cast(
Dict[bytes32, WalletCoinRecord],
- await self.wallet_state_manager.coin_store.get_coin_records(coins_of_interest),
+ (
+ await self.wallet_state_manager.coin_store.get_coin_records(
+ coin_id_filter=HashFilter.include(coins_of_interest)
+ )
+ ).coin_id_to_record,
)
async def get_all_trades(self) -> List[TradeRecord]:
@@ -507,8 +514,12 @@ async def _create_offer_for_ids(
wallet = await self.wallet_state_manager.get_wallet_for_asset_id(asset_id.hex())
if not callable(getattr(wallet, "get_coins_to_offer", None)): # ATTENTION: new wallets
raise ValueError(f"Cannot offer coins from wallet id {wallet.id()}")
+ # For the XCH wallet, also add the fee amount to the coins selected to pay for this offer
+ amount_to_select = abs(amount)
+ if wallet.type() == WalletType.STANDARD_WALLET:
+ amount_to_select += fee
coins_to_offer[id] = await wallet.get_coins_to_offer(
- asset_id, uint64(abs(amount)), min_coin_amount, max_coin_amount
+ asset_id, uint64(amount_to_select), min_coin_amount, max_coin_amount
)
# Note: if we use check_for_special_offer_making, this is not used.
elif amount == 0:
@@ -547,7 +558,10 @@ async def _create_offer_for_ids(
all_transactions: List[TransactionRecord] = []
fee_left_to_pay: uint64 = fee
- for id, selected_coins in coins_to_offer.items():
+ # Iterating over the sorted keys ensures the XCH transaction is created first, so the fee is paid
+ # from the XCH side of the offer and no extra fee transaction is created in other wallets.
+ for id in sorted(coins_to_offer.keys()):
+ selected_coins = coins_to_offer[id]
if isinstance(id, int):
wallet = self.wallet_state_manager.wallets[id]
else:
@@ -640,11 +654,12 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) ->
addition_dict: Dict[uint32, List[Coin]] = {}
for addition in additions:
- wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(addition.puzzle_hash)
- if wallet_info is not None:
- wallet_id, _ = wallet_info
+ wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_puzzle_hash(
+ addition.puzzle_hash
+ )
+ if wallet_identifier is not None:
if addition.parent_coin_info in settlement_coin_ids:
- wallet = self.wallet_state_manager.wallets[wallet_id]
+ wallet = self.wallet_state_manager.wallets[wallet_identifier.id]
to_puzzle_hash = await wallet.convert_puzzle_hash(addition.puzzle_hash) # ATTENTION: new wallets
txs.append(
TransactionRecord(
@@ -658,7 +673,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) ->
spend_bundle=None,
additions=[addition],
removals=[],
- wallet_id=wallet_id,
+ wallet_id=wallet_identifier.id,
sent_to=[],
trade_id=offer.name(),
type=uint32(TransactionType.INCOMING_TRADE.value),
@@ -667,17 +682,18 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) ->
)
)
else: # This is change
- addition_dict.setdefault(wallet_id, [])
- addition_dict[wallet_id].append(addition)
+ addition_dict.setdefault(wallet_identifier.id, [])
+ addition_dict[wallet_identifier.id].append(addition)
# While we want additions to show up as separate records, removals of the same wallet should show as one
removal_dict: Dict[uint32, List[Coin]] = {}
for removal in removals:
- wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(removal.puzzle_hash)
- if wallet_info is not None:
- wallet_id, _ = wallet_info
- removal_dict.setdefault(wallet_id, [])
- removal_dict[wallet_id].append(removal)
+ wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_puzzle_hash(
+ removal.puzzle_hash
+ )
+ if wallet_identifier is not None:
+ removal_dict.setdefault(wallet_identifier.id, [])
+ removal_dict[wallet_identifier.id].append(removal)
all_removals: List[bytes32] = [r.name() for removals in removal_dict.values() for r in removals]
diff --git a/chia/wallet/trading/offer.py b/chia/wallet/trading/offer.py
--- a/chia/wallet/trading/offer.py
+++ b/chia/wallet/trading/offer.py
@@ -34,8 +34,8 @@
lowest_best_version,
)
-OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clvm")
-OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clvm")
+OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clsp")
+OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clsp")
OFFER_MOD_OLD_HASH = OFFER_MOD_OLD.get_tree_hash()
OFFER_MOD_HASH = OFFER_MOD.get_tree_hash()
ZERO_32 = bytes32([0] * 32)
@@ -459,7 +459,7 @@ def to_valid_spend(self, arbitrage_ph: Optional[bytes32] = None) -> SpendBundle:
if arbitrage_amount > 0:
assert arbitrage_amount is not None
assert arbitrage_ph is not None
- all_payments.append(NotarizedPayment(arbitrage_ph, uint64(arbitrage_amount), []))
+ all_payments.append(NotarizedPayment(arbitrage_ph, uint64(arbitrage_amount)))
# Some assets need to know about siblings so we need to collect all spends first to be able to use them
coin_to_spend_dict: Dict[Coin, CoinSpend] = {}
diff --git a/chia/wallet/transaction_record.py b/chia/wallet/transaction_record.py
--- a/chia/wallet/transaction_record.py
+++ b/chia/wallet/transaction_record.py
@@ -16,6 +16,8 @@
T = TypeVar("T")
+minimum_send_attempts = 6
+
@dataclass
class ItemAndTransactionRecords(Generic[T]):
@@ -112,14 +114,13 @@ def to_json_dict_convenience(self, config: Dict) -> Dict:
return formatted
def is_valid(self) -> bool:
- past_receipts = self.sent_to
- if len(past_receipts) < 6:
+ if len(self.sent_to) < minimum_send_attempts:
# we haven't tried enough peers yet
return True
- if any([x[0] for x in past_receipts if x[0] == MempoolInclusionStatus.SUCCESS.value]):
+ if any(x[1] == MempoolInclusionStatus.SUCCESS for x in self.sent_to):
# we managed to push it to mempool at least once
return True
- if any([x[1] for x in past_receipts if x[1] in (Err.INVALID_FEE_LOW_FEE, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO)]):
+ if any(x[2] in (Err.INVALID_FEE_LOW_FEE.name, Err.INVALID_FEE_TOO_CLOSE_TO_ZERO.name) for x in self.sent_to):
# we tried to push it to mempool and got a fee error so it's a temporary error
return True
return False
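Taken together, the constant and the rewritten checks mean a transaction is still considered retryable until six peers have been tried, unless some peer already accepted it or the only failures were low-fee errors. A standalone restatement of the rule, detached from the Streamable types:

MINIMUM_SEND_ATTEMPTS = 6  # mirrors minimum_send_attempts above

def still_worth_retrying(sent_to, success_status, temporary_fee_error_names) -> bool:
    # sent_to is a list of (peer, status, error_name) entries, shaped like TransactionRecord.sent_to
    if len(sent_to) < MINIMUM_SEND_ATTEMPTS:
        return True  # not enough peers have been tried yet
    if any(status == success_status for _, status, _ in sent_to):
        return True  # reached a mempool at least once
    if any(error in temporary_fee_error_names for _, _, error in sent_to):
        return True  # only fee-related rejections, safe to retry later
    return False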
diff --git a/chia/wallet/util/debug_spend_bundle.py b/chia/wallet/util/debug_spend_bundle.py
--- a/chia/wallet/util/debug_spend_bundle.py
+++ b/chia/wallet/util/debug_spend_bundle.py
@@ -73,43 +73,40 @@ def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.A
print(f" with id {coin_name.hex()}")
print()
print(f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'")
- error, conditions, cost = conditions_dict_for_solution(puzzle_reveal, solution, INFINITE_COST)
- if error:
- print(f"*** error {error}")
- elif conditions is not None:
- for pk_bytes, m in pkm_pairs_for_conditions_dict(conditions, coin_name, agg_sig_additional_data):
- pks.append(G1Element.from_bytes(pk_bytes))
- msgs.append(m)
- print()
- cost, r = puzzle_reveal.run_with_cost(INFINITE_COST, solution) # type: ignore
- print(disassemble(r))
- print()
- if conditions and len(conditions) > 0:
- print("grouped conditions:")
- for condition_programs in conditions.values():
- print()
- for c in condition_programs:
- if len(c.vars) == 1:
- as_prog = Program.to([c.opcode, c.vars[0]])
- if len(c.vars) == 2:
- as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
- print(f" {disassemble(as_prog)}")
- created_coin_announcements.extend(
- [coin_name] + _.vars for _ in conditions.get(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [])
- )
- asserted_coin_announcements.extend(
- [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])]
- )
- created_puzzle_announcements.extend(
- [puzzle_reveal.get_tree_hash()] + _.vars
- for _ in conditions.get(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [])
- )
- asserted_puzzle_announcements.extend(
- [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [])]
- )
+ conditions = conditions_dict_for_solution(puzzle_reveal, solution, INFINITE_COST)
+ for pk_bytes, m in pkm_pairs_for_conditions_dict(conditions, coin_name, agg_sig_additional_data):
+ pks.append(G1Element.from_bytes(pk_bytes))
+ msgs.append(m)
+ print()
+ cost, r = puzzle_reveal.run_with_cost(INFINITE_COST, solution)
+ print(disassemble(r))
+ print()
+ if conditions and len(conditions) > 0:
+ print("grouped conditions:")
+ for condition_programs in conditions.values():
print()
- else:
- print("(no output conditions generated)")
+ for c in condition_programs:
+ if len(c.vars) == 1:
+ as_prog = Program.to([c.opcode, c.vars[0]])
+ if len(c.vars) == 2:
+ as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
+ print(f" {disassemble(as_prog)}")
+ created_coin_announcements.extend(
+ [coin_name] + _.vars for _ in conditions.get(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [])
+ )
+ asserted_coin_announcements.extend(
+ [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])]
+ )
+ created_puzzle_announcements.extend(
+ [puzzle_reveal.get_tree_hash()] + _.vars
+ for _ in conditions.get(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [])
+ )
+ asserted_puzzle_announcements.extend(
+ [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [])]
+ )
+ print()
+ else:
+ print("(no output conditions generated)")
print()
print("-------")
diff --git a/chia/wallet/util/notifications.py b/chia/wallet/util/notifications.py
--- a/chia/wallet/util/notifications.py
+++ b/chia/wallet/util/notifications.py
@@ -5,7 +5,7 @@
from chia.util.ints import uint64
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
-NOTIFICATION_MOD = load_clvm_maybe_recompile("notification.clvm")
+NOTIFICATION_MOD = load_clvm_maybe_recompile("notification.clsp")
def construct_notification(target: bytes32, amount: uint64) -> Program:
diff --git a/chia/wallet/util/puzzle_compression.py b/chia/wallet/util/puzzle_compression.py
--- a/chia/wallet/util/puzzle_compression.py
+++ b/chia/wallet/util/puzzle_compression.py
@@ -20,8 +20,8 @@
"ff02ffff01ff02ff5effff04ff02ffff04ffff04ff05ffff04ffff0bff2cff0580ffff04ff0bff80808080ffff04ffff02ff17ff2f80ffff04ff5fffff04ffff02ff2effff04ff02ffff04ff17ff80808080ffff04ffff0bff82027fff82057fff820b7f80ffff04ff81bfffff04ff82017fffff04ff8202ffffff04ff8205ffffff04ff820bffff80808080808080808080808080ffff04ffff01ffffffff81ca3dff46ff0233ffff3c04ff01ff0181cbffffff02ff02ffff03ff05ffff01ff02ff32ffff04ff02ffff04ff0dffff04ffff0bff22ffff0bff2cff3480ffff0bff22ffff0bff22ffff0bff2cff5c80ff0980ffff0bff22ff0bffff0bff2cff8080808080ff8080808080ffff010b80ff0180ffff02ffff03ff0bffff01ff02ffff03ffff09ffff02ff2effff04ff02ffff04ff13ff80808080ff820b9f80ffff01ff02ff26ffff04ff02ffff04ffff02ff13ffff04ff5fffff04ff17ffff04ff2fffff04ff81bfffff04ff82017fffff04ff1bff8080808080808080ffff04ff82017fff8080808080ffff01ff088080ff0180ffff01ff02ffff03ff17ffff01ff02ffff03ffff20ff81bf80ffff0182017fffff01ff088080ff0180ffff01ff088080ff018080ff0180ffff04ffff04ff05ff2780ffff04ffff10ff0bff5780ff778080ff02ffff03ff05ffff01ff02ffff03ffff09ffff02ffff03ffff09ff11ff7880ffff0159ff8080ff0180ffff01818f80ffff01ff02ff7affff04ff02ffff04ff0dffff04ff0bffff04ffff04ff81b9ff82017980ff808080808080ffff01ff02ff5affff04ff02ffff04ffff02ffff03ffff09ff11ff7880ffff01ff04ff78ffff04ffff02ff36ffff04ff02ffff04ff13ffff04ff29ffff04ffff0bff2cff5b80ffff04ff2bff80808080808080ff398080ffff01ff02ffff03ffff09ff11ff2480ffff01ff04ff24ffff04ffff0bff20ff2980ff398080ffff010980ff018080ff0180ffff04ffff02ffff03ffff09ff11ff7880ffff0159ff8080ff0180ffff04ffff02ff7affff04ff02ffff04ff0dffff04ff0bffff04ff17ff808080808080ff80808080808080ff0180ffff01ff04ff80ffff04ff80ff17808080ff0180ffffff02ffff03ff05ffff01ff04ff09ffff02ff26ffff04ff02ffff04ff0dffff04ff0bff808080808080ffff010b80ff0180ff0bff22ffff0bff2cff5880ffff0bff22ffff0bff22ffff0bff2cff5c80ff0580ffff0bff22ffff02ff32ffff04ff02ffff04ff07ffff04ffff0bff2cff2c80ff8080808080ffff0bff2cff8080808080ffff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff2effff04ff02ffff04ff09ff80808080ffff02ff2effff04ff02ffff04ff0dff8080808080ffff01ff0bff2cff058080ff0180ffff04ffff04ff28ffff04ff5fff808080ffff02ff7effff04ff02ffff04ffff04ffff04ff2fff0580ffff04ff5fff82017f8080ffff04ffff02ff7affff04ff02ffff04ff0bffff04ff05ffff01ff808080808080ffff04ff17ffff04ff81bfffff04ff82017fffff04ffff0bff8204ffffff02ff36ffff04ff02ffff04ff09ffff04ff820affffff04ffff0bff2cff2d80ffff04ff15ff80808080808080ff8216ff80ffff04ff8205ffffff04ff820bffff808080808080808080808080ff02ff2affff04ff02ffff04ff5fffff04ff3bffff04ffff02ffff03ff17ffff01ff09ff2dffff0bff27ffff02ff36ffff04ff02ffff04ff29ffff04ff57ffff04ffff0bff2cff81b980ffff04ff59ff80808080808080ff81b78080ff8080ff0180ffff04ff17ffff04ff05ffff04ff8202ffffff04ffff04ffff04ff24ffff04ffff0bff7cff2fff82017f80ff808080ffff04ffff04ff30ffff04ffff0bff81bfffff0bff7cff15ffff10ff82017fffff11ff8202dfff2b80ff8202ff808080ff808080ff138080ff80808080808080808080ff018080" # noqa
)
-OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clvm")
-OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clvm")
+OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clsp")
+OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clsp")
# For backwards compatibility to work, we must assume that these mods (already deployed) will not change
# In the case that they do change and we don't support the old asset then we need to keep around the legacy module
diff --git a/chia/wallet/util/query_filter.py b/chia/wallet/util/query_filter.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/util/query_filter.py
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import IntEnum
+from typing import List
+
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.ints import uint8, uint64
+from chia.util.streamable import Streamable, streamable
+from chia.wallet.util.transaction_type import TransactionType
+
+
+class FilterMode(IntEnum):
+ include = 1
+ exclude = 2
+
+
+@streamable
+@dataclass(frozen=True)
+class TransactionTypeFilter(Streamable):
+ values: List[uint8]
+ mode: uint8 # FilterMode
+
+ @classmethod
+ def include(cls, values: List[TransactionType]) -> TransactionTypeFilter:
+ return cls([uint8(t.value) for t in values], uint8(FilterMode.include))
+
+ @classmethod
+ def exclude(cls, values: List[TransactionType]) -> TransactionTypeFilter:
+ return cls([uint8(t.value) for t in values], uint8(FilterMode.exclude))
+
+
+@streamable
+@dataclass(frozen=True)
+class AmountFilter(Streamable):
+ values: List[uint64]
+ mode: uint8 # FilterMode
+
+ @classmethod
+ def include(cls, values: List[uint64]) -> AmountFilter:
+ return cls(values, mode=uint8(FilterMode.include))
+
+ @classmethod
+ def exclude(cls, values: List[uint64]) -> AmountFilter:
+ return cls(values, mode=uint8(FilterMode.exclude))
+
+
+@streamable
+@dataclass(frozen=True)
+class HashFilter(Streamable):
+ values: List[bytes32]
+ mode: uint8 # FilterMode
+
+ @classmethod
+ def include(cls, values: List[bytes32]) -> HashFilter:
+ return cls(values, mode=uint8(FilterMode.include))
+
+ @classmethod
+ def exclude(cls, values: List[bytes32]) -> HashFilter:
+ return cls(values, mode=uint8(FilterMode.exclude))
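As a usage sketch, the classmethod constructors are the intended entry points; the coin-id filter passed to get_coin_records in the TradeManager hunk above could be built like this (the ids and amounts are placeholders):

from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint64
from chia.wallet.util.query_filter import AmountFilter, HashFilter

coin_ids = [bytes32(b"\x22" * 32)]                # hypothetical coin ids
only_these = HashFilter.include(coin_ids)         # mode set to FilterMode.include
without_dust = AmountFilter.exclude([uint64(1)])  # mode set to FilterMode.exclude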
diff --git a/chia/wallet/util/wallet_sync_utils.py b/chia/wallet/util/wallet_sync_utils.py
--- a/chia/wallet/util/wallet_sync_utils.py
+++ b/chia/wallet/util/wallet_sync_utils.py
@@ -9,10 +9,11 @@
from chia.consensus.constants import ConsensusConstants
from chia.full_node.full_node_api import FullNodeAPI
-from chia.protocols import wallet_protocol
from chia.protocols.shared_protocol import Capability
from chia.protocols.wallet_protocol import (
CoinState,
+ RegisterForCoinUpdates,
+ RegisterForPhUpdates,
RejectAdditionsRequest,
RejectBlockHeaders,
RejectHeaderBlocks,
@@ -20,10 +21,12 @@
RequestAdditions,
RequestBlockHeaders,
RequestHeaderBlocks,
+ RequestPuzzleSolution,
RequestRemovals,
RespondAdditions,
RespondBlockHeaders,
RespondHeaderBlocks,
+ RespondPuzzleSolution,
RespondRemovals,
RespondToCoinUpdates,
RespondToPhUpdates,
@@ -31,6 +34,7 @@
from chia.server.ws_connection import WSChiaConnection
from chia.types.blockchain_format.coin import Coin, hash_coin_ids
from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
from chia.types.full_block import FullBlock
from chia.types.header_block import HeaderBlock
from chia.util.ints import uint32
@@ -70,12 +74,12 @@ async def subscribe_to_phs(
"""
Tells full nodes that we are interested in puzzle hashes, and returns the response.
"""
- msg = wallet_protocol.RegisterForPhUpdates(puzzle_hashes, uint32(max(min_height, uint32(0))))
+ msg = RegisterForPhUpdates(puzzle_hashes, uint32(max(min_height, uint32(0))))
all_coins_state: Optional[RespondToPhUpdates] = await peer.call_api(
FullNodeAPI.register_interest_in_puzzle_hash, msg, timeout=300
)
if all_coins_state is None:
- raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_puzzle_hash")
+ raise ValueError(f"None response from peer {peer.peer_info.host} for register_interest_in_puzzle_hash")
return all_coins_state.coin_states
@@ -87,13 +91,13 @@ async def subscribe_to_coin_updates(
"""
Tells full nodes that we are interested in coin ids, and returns the response.
"""
- msg = wallet_protocol.RegisterForCoinUpdates(coin_names, uint32(max(0, min_height)))
+ msg = RegisterForCoinUpdates(coin_names, uint32(max(0, min_height)))
all_coins_state: Optional[RespondToCoinUpdates] = await peer.call_api(
FullNodeAPI.register_interest_in_coin, msg, timeout=300
)
if all_coins_state is None:
- raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_coin")
+ raise ValueError(f"None response from peer {peer.peer_info.host} for register_interest_in_coin")
return all_coins_state.coin_states
@@ -314,6 +318,17 @@ def last_change_height_cs(cs: CoinState) -> uint32:
return uint32(0)
+def sort_coin_states(coin_states: List[CoinState]) -> List[CoinState]:
+ return sorted(
+ coin_states,
+ key=lambda coin_state: (
+ last_change_height_cs(coin_state),
+ 0 if coin_state.created_height is None else coin_state.created_height,
+ 0 if coin_state.spent_height is None else coin_state.spent_height,
+ ),
+ )
+
+
def get_block_header(block: FullBlock) -> HeaderBlock:
return HeaderBlock(
block.finished_sub_slots,
@@ -408,3 +423,25 @@ async def fetch_header_blocks_in_range(
assert res_h_blocks is not None
blocks.extend([bl for bl in res_h_blocks.header_blocks if bl.height >= start])
return blocks
+
+
+async def fetch_coin_spend(height: uint32, coin: Coin, peer: WSChiaConnection) -> CoinSpend:
+ solution_response = await peer.call_api(
+ FullNodeAPI.request_puzzle_solution, RequestPuzzleSolution(coin.name(), height)
+ )
+ if solution_response is None or not isinstance(solution_response, RespondPuzzleSolution):
+ raise PeerRequestException(f"Was not able to obtain solution {solution_response}")
+ assert solution_response.response.puzzle.get_tree_hash() == coin.puzzle_hash
+ assert solution_response.response.coin_name == coin.name()
+
+ return CoinSpend(
+ coin,
+ solution_response.response.puzzle,
+ solution_response.response.solution,
+ )
+
+
+async def fetch_coin_spend_for_coin_state(coin_state: CoinState, peer: WSChiaConnection) -> CoinSpend:
+ if coin_state.spent_height is None:
+ raise ValueError("coin_state.coin must be spent coin")
+ return await fetch_coin_spend(uint32(coin_state.spent_height), coin_state.coin, peer)
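Both new helpers are async and expect an established WSChiaConnection; a hedged usage sketch inside some sync coroutine, where `coin_state` and `peer` are assumed to come from the surrounding wallet code:

from chia.wallet.util.wallet_sync_utils import fetch_coin_spend_for_coin_state

async def resolve_spend_if_spent(coin_state, peer):
    # Unspent coins have no solution to fetch; the helper raises ValueError for those.
    if coin_state.spent_height is None:
        return None
    return await fetch_coin_spend_for_coin_state(coin_state, peer)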
diff --git a/chia/wallet/util/wallet_types.py b/chia/wallet/util/wallet_types.py
--- a/chia/wallet/util/wallet_types.py
+++ b/chia/wallet/util/wallet_types.py
@@ -1,12 +1,14 @@
from __future__ import annotations
+from dataclasses import dataclass
from enum import IntEnum
-from typing import List
+from typing import TYPE_CHECKING
-from typing_extensions import TypedDict
+from chia.util.ints import uint8, uint32
+from chia.util.streamable import Streamable, streamable
-from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.util.ints import uint64
+if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
class WalletType(IntEnum):
@@ -23,9 +25,27 @@ class WalletType(IntEnum):
NFT = 10
DATA_LAYER = 11
DATA_LAYER_OFFER = 12
+ VC = 13
-class AmountWithPuzzlehash(TypedDict):
- amount: uint64
- puzzlehash: bytes32
- memos: List[bytes]
+class CoinType(IntEnum):
+ NORMAL = 0
+ CLAWBACK = 1
+
+
+@dataclass(frozen=True)
+class WalletIdentifier:
+ id: uint32
+ type: WalletType
+
+ @classmethod
+ def create(cls, wallet: WalletProtocol) -> WalletIdentifier:
+ return cls(wallet.id(), wallet.type())
+
+
+# TODO, Can be replaced with WalletIdentifier if we have streamable enums
+@streamable
+@dataclass(frozen=True)
+class StreamableWalletIdentifier(Streamable):
+ id: uint32
+ type: uint8
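A small sketch of how the new identifier replaces the old (id, type) tuples; `some_wallet` stands in for any object satisfying WalletProtocol:

from chia.wallet.util.wallet_types import WalletIdentifier, WalletType

def describe_wallet(some_wallet) -> str:
    # WalletIdentifier.create(w) is shorthand for WalletIdentifier(w.id(), w.type())
    identifier = WalletIdentifier.create(some_wallet)
    if identifier.type == WalletType.STANDARD_WALLET:
        return f"standard wallet #{identifier.id}"
    return f"wallet #{identifier.id} of type {identifier.type.name}"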
diff --git a/chia/wallet/settings/__init__.py b/chia/wallet/vc_wallet/__init__.py
similarity index 100%
rename from chia/wallet/settings/__init__.py
rename to chia/wallet/vc_wallet/__init__.py
diff --git a/chia/wallet/vc_wallet/cr_cat_drivers.py b/chia/wallet/vc_wallet/cr_cat_drivers.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/vc_wallet/cr_cat_drivers.py
@@ -0,0 +1,603 @@
+from __future__ import annotations
+
+import functools
+from dataclasses import dataclass, replace
+from typing import Iterable, List, Optional, Tuple, Type, TypeVar
+
+from clvm.casts import int_to_bytes
+
+from chia.types.blockchain_format.coin import Coin, coin_as_list
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.util.hash import std_hash
+from chia.util.ints import uint64
+from chia.wallet.cat_wallet.cat_utils import construct_cat_puzzle
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.payment import Payment
+from chia.wallet.puzzles.cat_loader import CAT_MOD
+from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
+from chia.wallet.puzzles.singleton_top_layer_v1_1 import SINGLETON_LAUNCHER_HASH, SINGLETON_MOD_HASH
+from chia.wallet.uncurried_puzzle import UncurriedPuzzle, uncurry_puzzle
+from chia.wallet.vc_wallet.vc_drivers import (
+ COVENANT_LAYER_HASH,
+ EML_TP_COVENANT_ADAPTER_HASH,
+ EXTIGENT_METADATA_LAYER_HASH,
+ GUARANTEED_NIL_TP,
+ P2_ANNOUNCED_DELEGATED_PUZZLE,
+ create_did_tp,
+ create_eml_covenant_morpher,
+)
+
+# Mods
+CREDENTIAL_RESTRICTION: Program = load_clvm_maybe_recompile(
+ "credential_restriction.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.cr_puzzles",
+ include_standard_libraries=True,
+)
+CREDENTIAL_RESTRICTION_HASH: bytes32 = CREDENTIAL_RESTRICTION.get_tree_hash()
+PROOF_FLAGS_CHECKER: Program = load_clvm_maybe_recompile(
+ "flag_proofs_checker.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.cr_puzzles",
+ include_standard_libraries=True,
+)
+
+
+# Basic drivers
+def construct_cr_layer(
+ authorized_providers: List[bytes32],
+ proofs_checker: Program,
+ inner_puzzle: Program,
+) -> Program:
+ first_curry: Program = CREDENTIAL_RESTRICTION.curry(
+ Program.to(
+ (
+ (
+ (
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ ),
+ (
+ EXTIGENT_METADATA_LAYER_HASH,
+ EML_TP_COVENANT_ADAPTER_HASH,
+ ),
+ ),
+ (
+ Program.to(EXTIGENT_METADATA_LAYER_HASH)
+ .curry(
+ Program.to(EXTIGENT_METADATA_LAYER_HASH).get_tree_hash(),
+ Program.to(None),
+ GUARANTEED_NIL_TP,
+ GUARANTEED_NIL_TP.get_tree_hash(),
+ P2_ANNOUNCED_DELEGATED_PUZZLE,
+ )
+ .get_tree_hash_precalc(
+ EXTIGENT_METADATA_LAYER_HASH, Program.to(EXTIGENT_METADATA_LAYER_HASH).get_tree_hash()
+ ),
+ (
+ Program.to(
+ int_to_bytes(2)
+ + Program.to((1, COVENANT_LAYER_HASH)).get_tree_hash_precalc(COVENANT_LAYER_HASH)
+ ),
+ Program.to(
+ (
+ [
+ 4,
+ (1, create_eml_covenant_morpher(create_did_tp().get_tree_hash())),
+ [4, (1, create_did_tp()), 1],
+ ],
+ None,
+ )
+ ).get_tree_hash(),
+ ),
+ ),
+ ),
+ ),
+ authorized_providers,
+ proofs_checker,
+ )
+ return first_curry.curry(first_curry.get_tree_hash(), inner_puzzle)
+
+
+# Coverage coming with CR-CAT Wallet
+def match_cr_layer(
+ uncurried_puzzle: UncurriedPuzzle,
+) -> Optional[Tuple[List[bytes32], Program, Program]]: # pragma: no cover
+ if uncurried_puzzle.mod == CREDENTIAL_RESTRICTION:
+ extra_uncurried_puzzle = uncurry_puzzle(uncurried_puzzle.mod)
+ return (
+ [bytes32(provider.atom) for provider in extra_uncurried_puzzle.args.at("rf").as_iter()],
+ extra_uncurried_puzzle.args.at("rrf"),
+ uncurried_puzzle.args.at("rf"),
+ )
+ else:
+ return None
+
+
+def solve_cr_layer(
+ proof_of_inclusions: Program,
+ proof_checker_solution: Program,
+ provider_id: bytes32,
+ vc_launcher_id: bytes32,
+ vc_inner_puzhash: bytes32,
+ my_coin_id: bytes32,
+ inner_solution: Program,
+) -> Program:
+ solution: Program = Program.to(
+ [
+ proof_of_inclusions,
+ proof_checker_solution,
+ provider_id,
+ vc_launcher_id,
+ vc_inner_puzhash,
+ my_coin_id,
+ inner_solution,
+ ]
+ )
+ return solution
+
+
+_T_CRCAT = TypeVar("_T_CRCAT", bound="CRCAT")
+
+
+@dataclass(frozen=True)
+class CRCAT:
+ coin: Coin
+ tail_hash: bytes32
+ lineage_proof: LineageProof
+ authorized_providers: List[bytes32]
+ proofs_checker: Program
+ inner_puzzle_hash: bytes32
+
+ @classmethod
+ def launch(
+ cls: Type[_T_CRCAT],
+ # General CAT launching info
+ origin_coin: Coin,
+ payment: Payment,
+ tail: Program,
+ tail_solution: Program,
+ # CR Layer params
+ authorized_providers: List[bytes32],
+ proofs_checker: Program,
+ # Probably never need this but some tail might
+ optional_lineage_proof: Optional[LineageProof] = None,
+ ) -> Tuple[Program, CoinSpend, CRCAT]:
+ """
+ Launch a new CR-CAT from XCH.
+
+ Returns a delegated puzzle to run that creates the eve CAT, an eve coin spend of the CAT, and the expected class
+ representation after all relevant coin spends have been confirmed on chain.
+ """
+ tail_hash: bytes32 = tail.get_tree_hash()
+
+ new_cr_layer_hash: bytes32 = construct_cr_layer(
+ authorized_providers,
+ proofs_checker,
+ payment.puzzle_hash, # type: ignore
+ ).get_tree_hash_precalc(payment.puzzle_hash)
+ new_cat_puzhash: bytes32 = construct_cat_puzzle(
+ CAT_MOD,
+ tail_hash,
+ new_cr_layer_hash, # type: ignore
+ ).get_tree_hash_precalc(new_cr_layer_hash)
+
+ eve_innerpuz: Program = Program.to(
+ (
+ 1,
+ [
+ [51, new_cr_layer_hash, payment.amount, payment.memos],
+ [51, None, -113, tail, tail_solution],
+ [60, None],
+ [1, payment.puzzle_hash, authorized_providers, proofs_checker],
+ ],
+ )
+ )
+ eve_cat_puzzle: Program = construct_cat_puzzle(
+ CAT_MOD,
+ tail_hash,
+ eve_innerpuz,
+ )
+ eve_cat_puzzle_hash: bytes32 = eve_cat_puzzle.get_tree_hash()
+
+ eve_coin: Coin = Coin(origin_coin.name(), eve_cat_puzzle_hash, payment.amount)
+ dpuz: Program = Program.to(
+ (
+ 1,
+ [
+ [51, eve_cat_puzzle_hash, payment.amount],
+ [61, std_hash(eve_coin.name())],
+ ],
+ )
+ )
+
+ eve_proof: LineageProof = LineageProof(
+ eve_coin.parent_coin_info,
+ eve_innerpuz.get_tree_hash(),
+ uint64(eve_coin.amount),
+ )
+
+ return (
+ dpuz,
+ CoinSpend(
+ eve_coin,
+ eve_cat_puzzle,
+ Program.to( # solve_cat
+ [
+ None,
+ optional_lineage_proof,
+ eve_coin.name(),
+ coin_as_list(eve_coin),
+ eve_proof.to_program(),
+ 0,
+ 0,
+ ]
+ ),
+ ),
+ CRCAT(
+ Coin(eve_coin.name(), new_cat_puzhash, payment.amount),
+ tail_hash,
+ eve_proof,
+ authorized_providers,
+ proofs_checker,
+ payment.puzzle_hash,
+ ),
+ )
+
+ def construct_puzzle(self, inner_puzzle: Program) -> Program:
+ return construct_cat_puzzle(
+ CAT_MOD,
+ self.tail_hash,
+ self.construct_cr_layer(inner_puzzle),
+ )
+
+ def construct_cr_layer(self, inner_puzzle: Program) -> Program:
+ return construct_cr_layer(
+ self.authorized_providers,
+ self.proofs_checker,
+ inner_puzzle,
+ )
+
+ @staticmethod
+ def is_cr_cat(puzzle_reveal: UncurriedPuzzle) -> Tuple[bool, str]:
+ """
+ This takes an (uncurried) puzzle reveal and returns a boolean for whether the puzzle is a CR-CAT and an error
+ message if the puzzle is a mismatch.
+ """
+ if puzzle_reveal.mod != CAT_MOD:
+ return False, "top most layer is not a CAT" # pragma: no cover
+ layer_below_cat: UncurriedPuzzle = uncurry_puzzle(puzzle_reveal.args.at("rrf"))
+ if layer_below_cat.mod != CREDENTIAL_RESTRICTION:
+ return False, "CAT is not credential restricted" # pragma: no cover
+
+ # Coverage coming with CR-CAT Wallet
+ return True, "" # pragma: no cover
+
+ # Coverage coming with CR-CAT Wallet
+ @staticmethod
+ def get_inner_puzzle(puzzle_reveal: UncurriedPuzzle) -> Program: # pragma: no cover
+ return uncurry_puzzle(puzzle_reveal.args.at("rrf")).args.at("rf")
+
+ @staticmethod
+ def get_inner_solution(solution: Program) -> Program: # pragma: no cover
+ return solution.at("f").at("rrrrrrf")
+
+ @classmethod
+ def get_current_from_coin_spend(cls: Type[_T_CRCAT], spend: CoinSpend) -> CRCAT: # pragma: no cover
+ uncurried_puzzle: UncurriedPuzzle = uncurry_puzzle(spend.puzzle_reveal.to_program())
+ first_uncurried_cr_layer: UncurriedPuzzle = uncurry_puzzle(uncurried_puzzle.args.at("rrf"))
+ second_uncurried_cr_layer: UncurriedPuzzle = uncurry_puzzle(first_uncurried_cr_layer.mod)
+ return CRCAT(
+ spend.coin,
+ bytes32(uncurried_puzzle.args.at("rf").atom),
+ spend.solution.to_program().at("rf"),
+ [bytes32(ap.atom) for ap in second_uncurried_cr_layer.args.at("rf").as_iter()],
+ second_uncurried_cr_layer.args.at("rrf"),
+ first_uncurried_cr_layer.args.at("f").get_tree_hash(),
+ )
+
+ @classmethod
+ def get_next_from_coin_spend(
+ cls: Type[_T_CRCAT],
+ parent_spend: CoinSpend,
+ conditions: Optional[Program] = None, # For optimization purposes, the conditions may already have been run
+ ) -> List[CRCAT]:
+ """
+ Given a coin spend, this will return the next CR-CATs that were created as an output of that spend.
+ Inner puzzle output conditions may also be supplied as an optimization.
+
+ This is the main method to use when syncing. It can also sync from a CAT spend that was not a CR-CAT so long
+ as the spend output a remark condition that was (REMARK authorized_providers proofs_checker)
+ """
+ coin_name: bytes32 = parent_spend.coin.name()
+ puzzle: Program = parent_spend.puzzle_reveal.to_program()
+ solution: Program = parent_spend.solution.to_program()
+
+ # Get info by uncurrying
+ _, tail_hash_as_prog, potential_cr_layer = puzzle.uncurry()[1].as_iter()
+ new_inner_puzzle_hash: Optional[bytes32] = None
+ if potential_cr_layer.uncurry()[0].uncurry()[0] != CREDENTIAL_RESTRICTION:
+ # If the previous spend is not a CR-CAT:
+ # we look for a remark condition that tells us the authorized_providers and proofs_checker
+ inner_solution: Program = solution.at("f")
+ if conditions is None:
+ conditions = potential_cr_layer.run(inner_solution)
+ for condition in conditions.as_iter():
+ if condition.at("f") == Program.to(1):
+ new_inner_puzzle_hash = bytes32(condition.at("rf").atom)
+ authorized_providers_as_prog: Program = condition.at("rrf")
+ proofs_checker: Program = condition.at("rrrf")
+ break
+ else:
+ raise ValueError(
+ "Previous spend was not a CR-CAT, nor did it properly remark the CR params"
+ ) # pragma: no cover
+ lineage_inner_puzhash: bytes32 = potential_cr_layer.get_tree_hash()
+ else:
+ # Otherwise the info we need will be in the puzzle reveal
+ cr_first_curry, self_hash_and_innerpuz = potential_cr_layer.uncurry()
+ _, authorized_providers_as_prog, proofs_checker = cr_first_curry.uncurry()[1].as_iter()
+ _, inner_puzzle = self_hash_and_innerpuz.as_iter()
+ inner_solution = solution.at("f").at("rrrrrrf")
+ if conditions is None:
+ conditions = inner_puzzle.run(inner_solution)
+ inner_puzzle_hash: bytes32 = inner_puzzle.get_tree_hash()
+ lineage_inner_puzhash = construct_cr_layer(
+ authorized_providers_as_prog,
+ proofs_checker,
+ inner_puzzle_hash, # type: ignore
+ ).get_tree_hash_precalc(inner_puzzle_hash)
+
+ # Convert all of the old stuff into python
+ authorized_providers: List[bytes32] = [bytes32(p.atom) for p in authorized_providers_as_prog.as_iter()]
+ new_lineage_proof: LineageProof = LineageProof(
+ parent_spend.coin.parent_coin_info,
+ lineage_inner_puzhash,
+ uint64(parent_spend.coin.amount),
+ )
+
+ # Almost complete, except for the coin's full puzzle hash, which we want the class's own method to calculate
+ partially_completed_crcats: List[CRCAT] = [
+ CRCAT(
+ Coin(coin_name, bytes(32), uint64(condition.at("rrf").as_int())),
+ bytes32(tail_hash_as_prog.atom),
+ new_lineage_proof,
+ authorized_providers,
+ proofs_checker,
+ bytes32(condition.at("rf").atom) if new_inner_puzzle_hash is None else new_inner_puzzle_hash,
+ )
+ for condition in conditions.as_iter()
+ if condition.at("f").as_int() == 51 and condition.at("rrf") != Program.to(-113)
+ ]
+
+ return [
+ replace(
+ crcat,
+ coin=Coin(
+ crcat.coin.parent_coin_info,
+ crcat.construct_puzzle(crcat.inner_puzzle_hash).get_tree_hash_precalc( # type: ignore
+ crcat.inner_puzzle_hash
+ ),
+ crcat.coin.amount,
+ ),
+ )
+ for crcat in partially_completed_crcats
+ ]
+
+ def do_spend(
+ self,
+ # CAT solving info
+ previous_coin_id: bytes32,
+ next_coin_proof: LineageProof,
+ previous_subtotal: int,
+ extra_delta: int,
+ # CR layer solving info
+ proof_of_inclusions: Program,
+ proof_checker_solution: Program,
+ provider_id: bytes32,
+ vc_launcher_id: bytes32,
+ vc_inner_puzhash: bytes32,
+ # Inner puzzle and solution
+ inner_puzzle: Program,
+ inner_solution: Program,
+ # For optimization purposes the conditions may already have been run
+ conditions: Optional[Iterable[Program]] = None,
+ ) -> Tuple[List[bytes32], CoinSpend, List["CRCAT"]]:
+ """
+ Spend a CR-CAT.
+
+ Must give the CAT accounting information, the valid VC proof, and the inner puzzle and solution. The function
+ will return the announcement IDs for the VC to optionally assert, the spend of this CAT, and the class
+ representations of any CR-CAT outputs.
+
+ Likely, spend_many is more useful.
+ """
+ # Gather the output information
+ announcement_ids: List[bytes32] = []
+ new_inner_puzzle_hashes_and_amounts: List[Tuple[bytes32, uint64]] = []
+ if conditions is None:
+ conditions = inner_puzzle.run(inner_solution).as_iter() # pragma: no cover
+ assert conditions is not None
+ for condition in conditions:
+ if condition.at("f").as_int() == 51 and condition.at("rrf").as_int() != -113:
+ new_inner_puzzle_hash: bytes32 = bytes32(condition.at("rf").atom)
+ new_amount: uint64 = uint64(condition.at("rrf").as_int())
+ announcement_ids.append(
+ std_hash(self.coin.name() + b"\xcd" + std_hash(new_inner_puzzle_hash + int_to_bytes(new_amount)))
+ )
+ new_inner_puzzle_hashes_and_amounts.append((new_inner_puzzle_hash, new_amount))
+
+ return (
+ announcement_ids,
+ CoinSpend(
+ self.coin,
+ self.construct_puzzle(inner_puzzle),
+ Program.to( # solve_cat
+ [
+ solve_cr_layer(
+ proof_of_inclusions,
+ proof_checker_solution,
+ provider_id,
+ vc_launcher_id,
+ vc_inner_puzhash,
+ self.coin.name(),
+ inner_solution,
+ ),
+ self.lineage_proof.to_program(),
+ previous_coin_id,
+ coin_as_list(self.coin),
+ next_coin_proof.to_program(),
+ previous_subtotal,
+ extra_delta,
+ ]
+ ),
+ ),
+ [
+ CRCAT(
+ Coin(
+ self.coin.name(),
+ self.construct_puzzle(new_inner_puzzle_hash).get_tree_hash_precalc( # type: ignore
+ new_inner_puzzle_hash
+ ),
+ new_amount,
+ ),
+ self.tail_hash,
+ LineageProof(
+ self.coin.parent_coin_info,
+ self.construct_cr_layer(self.inner_puzzle_hash).get_tree_hash_precalc( # type: ignore
+ self.inner_puzzle_hash
+ ),
+ uint64(self.coin.amount),
+ ),
+ self.authorized_providers,
+ self.proofs_checker,
+ new_inner_puzzle_hash,
+ )
+ for new_inner_puzzle_hash, new_amount in new_inner_puzzle_hashes_and_amounts
+ ],
+ )
+
+ @classmethod
+ def spend_many(
+ cls: Type[_T_CRCAT],
+ inner_spends: List[Tuple[_T_CRCAT, Program, Program]], # CRCAT, inner puzzle, inner solution
+ # CR layer solving info
+ proof_of_inclusions: Program,
+ proof_checker_solution: Program,
+ provider_id: bytes32,
+ vc_launcher_id: bytes32,
+ vc_inner_puzhash: bytes32,
+ ) -> Tuple[List[bytes32], List[CoinSpend], List[CRCAT]]:
+ """
+ Spend multiple CR-CATs.
+
+ This class will handle all of the CAT accounting information; the only necessary inputs are the inner
+ puzzle/solution and the proof of a valid VC being spent alongside all of the coins. There is currently no
+ support for multiple VCs being used across the spend. There is also currently no support for minting/melting.
+ """
+
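+        # The spends are arranged in the standard CAT ring: every coin names its previous coin and proves its next
+        # coin so that the running subtotals cancel out across the whole group.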
+ def next_index(index: int) -> int:
+ return 0 if index == len(inner_spends) - 1 else index + 1
+
+ def prev_index(index: int) -> int:
+ return index - 1
+
+ sorted_inner_spends: List[Tuple[_T_CRCAT, Program, Program]] = sorted(
+ inner_spends,
+ key=lambda spend: spend[0].coin.name(),
+ )
+
+ all_expected_announcements: List[bytes32] = []
+ all_coin_spends: List[CoinSpend] = []
+ all_new_crcats: List[CRCAT] = []
+
+ subtotal: int = 0
+ for i, inner_spend in enumerate(sorted_inner_spends):
+ crcat, inner_puzzle, inner_solution = inner_spend
+ conditions: List[Program] = list(inner_puzzle.run(inner_solution).as_iter())
+ output_amount: uint64 = uint64(
+ sum(
+ c.at("rrf").as_int()
+ for c in conditions
+ if c.at("f").as_int() == 51 and c.at("rrf").as_int() != -113
+ )
+ )
+ next_crcat, _, _ = sorted_inner_spends[next_index(i)]
+ prev_crcat, _, _ = sorted_inner_spends[prev_index(i)]
+ expected_announcements, coin_spend, new_crcats = crcat.do_spend(
+ prev_crcat.coin.name(),
+ LineageProof(
+ next_crcat.coin.parent_coin_info,
+ next_crcat.construct_cr_layer(
+ next_crcat.inner_puzzle_hash, # type: ignore
+ ).get_tree_hash_precalc(next_crcat.inner_puzzle_hash),
+ uint64(next_crcat.coin.amount),
+ ),
+ subtotal,
+ 0, # TODO: add support for mint/melt
+ proof_of_inclusions,
+ proof_checker_solution,
+ provider_id,
+ vc_launcher_id,
+ vc_inner_puzhash,
+ inner_puzzle,
+ inner_solution,
+ conditions=conditions,
+ )
+ all_expected_announcements.extend(expected_announcements)
+ all_coin_spends.append(coin_spend)
+ all_new_crcats.extend(new_crcats)
+
+ subtotal = subtotal + crcat.coin.amount - output_amount
+
+ return all_expected_announcements, all_coin_spends, all_new_crcats
+
+ def expected_announcement(self) -> bytes32:
+ """
+ The announcement a VC must make to this CAT in order to spend it
+ """
+ return std_hash(self.coin.name() + b"\xca")
+
+
+@dataclass(frozen=True)
+class CRCATSpend:
+ crcat: CRCAT
+ inner_puzzle: Program
+ inner_solution: Program
+ children: List[CRCAT]
+ provider_specified: bool
+ inner_conditions: List[Program]
+
+ # Coverage coming with CR-CAT wallet
+ @classmethod
+ def from_coin_spend(cls, spend: CoinSpend) -> CRCATSpend: # pragma: no cover
+ inner_puzzle: Program = CRCAT.get_inner_puzzle(uncurry_puzzle(spend.puzzle_reveal.to_program()))
+ inner_solution: Program = CRCAT.get_inner_solution(spend.solution.to_program())
+ inner_conditions: Program = inner_puzzle.run(inner_solution)
+ return cls(
+ CRCAT.get_current_from_coin_spend(spend),
+ inner_puzzle,
+ inner_solution,
+ CRCAT.get_next_from_coin_spend(spend, conditions=inner_conditions),
+ spend.solution.to_program().at("f").at("rrrrf") == Program.to(None),
+ list(inner_conditions.as_iter()),
+ )
+
+
+@dataclass(frozen=True)
+class ProofsChecker:
+ flags: List[str]
+
+ def as_program(self) -> Program:
+ def byte_sort_flags(f1: str, f2: str) -> int:
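+            # Opcode 10 is the CLVM ">s" (bytewise greater-than) operator, so flags end up in canonical byte order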
+ return 1 if Program.to([10, (1, f1), (1, f2)]).run([]) == Program.to(None) else -1
+
+ return PROOF_FLAGS_CHECKER.curry(
+ [
+ Program.to((flag, 1))
+ for flag in sorted(
+ self.flags,
+ key=functools.cmp_to_key(byte_sort_flags),
+ )
+ ]
+ )
diff --git a/chia/wallet/vc_wallet/cr_puzzles/__init__.py b/chia/wallet/vc_wallet/cr_puzzles/__init__.py
new file mode 100644
diff --git a/chia/wallet/vc_wallet/vc_drivers.py b/chia/wallet/vc_wallet/vc_drivers.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/vc_wallet/vc_drivers.py
@@ -0,0 +1,820 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, replace
+from typing import Iterator, List, Optional, Tuple, Type, TypeVar
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend, compute_additions
+from chia.util.hash import std_hash
+from chia.util.ints import uint64
+from chia.util.streamable import Streamable, streamable
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
+from chia.wallet.puzzles.singleton_top_layer_v1_1 import (
+ SINGLETON_LAUNCHER,
+ SINGLETON_LAUNCHER_HASH,
+ SINGLETON_MOD,
+ SINGLETON_MOD_HASH,
+ generate_launcher_coin,
+ puzzle_for_singleton,
+ solution_for_singleton,
+)
+from chia.wallet.uncurried_puzzle import UncurriedPuzzle, uncurry_puzzle
+
+# Mods
+EXTIGENT_METADATA_LAYER = load_clvm_maybe_recompile(
+ "exigent_metadata_layer.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+P2_ANNOUNCED_DELEGATED_PUZZLE: Program = load_clvm_maybe_recompile(
+ "p2_announced_delegated_puzzle.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+COVENANT_LAYER: Program = load_clvm_maybe_recompile(
+ "covenant_layer.clsp", package_or_requirement="chia.wallet.vc_wallet.vc_puzzles", include_standard_libraries=True
+)
+STD_COVENANT_PARENT_MORPHER: Program = load_clvm_maybe_recompile(
+ "std_parent_morpher.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+EML_TP_COVENANT_ADAPTER: Program = load_clvm_maybe_recompile(
+ "eml_transfer_program_covenant_adapter.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+EML_DID_TP: Program = load_clvm_maybe_recompile(
+ "eml_update_metadata_with_DID.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+EXTIGENT_METADATA_LAYER_COVENANT_MORPHER: Program = load_clvm_maybe_recompile(
+ "eml_covenant_morpher.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+)
+VIRAL_BACKDOOR: Program = load_clvm_maybe_recompile(
+ "viral_backdoor.clsp", package_or_requirement="chia.wallet.vc_wallet.vc_puzzles", include_standard_libraries=True
+)
+# (mod (METADATA conditions . solution) (if solution solution (list METADATA () ())))
+# (a (i 7 (q . 7) (q 4 2 (q () ()))) 1)
+ACS_TRANSFER_PROGRAM: Program = Program.to([2, [3, 7, (1, 7), [1, 4, 2, [1, None, None]]], 1])
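+# Note: the revocation path below (VerifiedCredential.activate_backdoor) expects the metadata to be cleared and the
+# transfer program to be swapped for this pass-through program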
+
+# Hashes
+EXTIGENT_METADATA_LAYER_HASH = EXTIGENT_METADATA_LAYER.get_tree_hash()
+P2_ANNOUNCED_DELEGATED_PUZZLE_HASH: bytes32 = P2_ANNOUNCED_DELEGATED_PUZZLE.get_tree_hash()
+COVENANT_LAYER_HASH: bytes32 = COVENANT_LAYER.get_tree_hash()
+STD_COVENANT_PARENT_MORPHER_HASH: bytes32 = STD_COVENANT_PARENT_MORPHER.get_tree_hash()
+EML_TP_COVENANT_ADAPTER_HASH: bytes32 = EML_TP_COVENANT_ADAPTER.get_tree_hash()
+EXTIGENT_METADATA_LAYER_COVENANT_MORPHER_HASH: bytes32 = EXTIGENT_METADATA_LAYER_COVENANT_MORPHER.get_tree_hash()
+VIRAL_BACKDOOR_HASH: bytes32 = VIRAL_BACKDOOR.get_tree_hash()
+
+
+# Standard brick puzzle uses the mods above
+STANDARD_BRICK_PUZZLE: Program = load_clvm_maybe_recompile(
+ "standard_vc_backdoor_puzzle.clsp",
+ package_or_requirement="chia.wallet.vc_wallet.vc_puzzles",
+ include_standard_libraries=True,
+).curry(
+ SINGLETON_MOD_HASH,
+ Program.to(SINGLETON_LAUNCHER_HASH).get_tree_hash(),
+ EXTIGENT_METADATA_LAYER_HASH,
+ VIRAL_BACKDOOR_HASH,
+ ACS_TRANSFER_PROGRAM.get_tree_hash(),
+)
+STANDARD_BRICK_PUZZLE_HASH: bytes32 = STANDARD_BRICK_PUZZLE.get_tree_hash()
+STANDARD_BRICK_PUZZLE_HASH_HASH: bytes32 = Program.to(STANDARD_BRICK_PUZZLE_HASH).get_tree_hash()
+
+
+##################
+# Covenant Layer #
+##################
+def create_covenant_layer(initial_puzzle_hash: bytes32, parent_morpher: Program, inner_puzzle: Program) -> Program:
+ return COVENANT_LAYER.curry(
+ initial_puzzle_hash,
+ parent_morpher,
+ inner_puzzle,
+ )
+
+
+def match_covenant_layer(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[bytes32, Program, Program]]:
+ if uncurried_puzzle.mod == COVENANT_LAYER:
+ return (
+ bytes32(uncurried_puzzle.args.at("f").atom),
+ uncurried_puzzle.args.at("rf"),
+ uncurried_puzzle.args.at("rrf"),
+ )
+ else:
+ return None # pragma: no cover
+
+
+def solve_covenant_layer(lineage_proof: LineageProof, morpher_solution: Program, inner_solution: Program) -> Program:
+ solution: Program = Program.to(
+ [
+ lineage_proof.to_program(),
+ morpher_solution,
+ inner_solution,
+ ]
+ )
+ return solution
+
+
+def create_std_parent_morpher(initial_puzzle_hash: bytes32) -> Program:
+ """
+ The standard PARENT_MORPHER for plain coins that want to prove an initial state
+ """
+ return STD_COVENANT_PARENT_MORPHER.curry(
+ STD_COVENANT_PARENT_MORPHER_HASH,
+ COVENANT_LAYER_HASH,
+ initial_puzzle_hash,
+ )
+
+
+####################
+# Covenant Adapter #
+####################
+def create_tp_covenant_adapter(covenant_layer: Program) -> Program:
+ return EML_TP_COVENANT_ADAPTER.curry(covenant_layer)
+
+
+def match_tp_covenant_adapter(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[Program]]: # pragma: no cover
+ if uncurried_puzzle.mod == EML_TP_COVENANT_ADAPTER:
+ return uncurried_puzzle.args.at("f")
+ else:
+ return None
+
+
+##################################
+# Update w/ DID Transfer Program #
+##################################
+def create_did_tp(
+ singleton_mod_hash: bytes32 = SINGLETON_MOD_HASH,
+ singleton_launcher_hash: bytes32 = SINGLETON_LAUNCHER_HASH,
+) -> Program:
+ return EML_DID_TP.curry(
+ singleton_mod_hash,
+ singleton_launcher_hash,
+ )
+
+
+EML_DID_TP_FULL_HASH = create_did_tp().get_tree_hash()
+
+
+def match_did_tp(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[()]]:
+ if uncurried_puzzle.mod == EML_DID_TP:
+ return ()
+ else:
+ return None # pragma: no cover
+
+
+def solve_did_tp(
+ provider_innerpuzhash: bytes32, my_coin_id: bytes32, new_metadata: Program, new_transfer_program: Program
+) -> Program:
+ solution: Program = Program.to(
+ [
+ provider_innerpuzhash,
+ my_coin_id,
+ new_metadata,
+ new_transfer_program,
+ ]
+ )
+ return solution
+
+
+##############################
+# P2 Puzzle or Hidden Puzzle #
+##############################
+def create_viral_backdoor(hidden_puzzle_hash: bytes32, inner_puzzle_hash: bytes32) -> Program:
+ return VIRAL_BACKDOOR.curry(
+ VIRAL_BACKDOOR_HASH,
+ hidden_puzzle_hash,
+ inner_puzzle_hash,
+ )
+
+
+def match_viral_backdoor(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[bytes32, bytes32]]:
+ if uncurried_puzzle.mod == VIRAL_BACKDOOR:
+ return bytes32(uncurried_puzzle.args.at("rf").atom), bytes32(uncurried_puzzle.args.at("rrf").atom)
+ else:
+ return None # pragma: no cover
+
+
+def solve_viral_backdoor(puzzle_reveal: Program, inner_solution: Program, hidden: bool = False) -> Program:
+ solution: Program = Program.to(
+ [
+ hidden,
+ puzzle_reveal,
+ inner_solution,
+ ]
+ )
+ return solution
+
+
+########
+# MISC #
+########
+def create_eml_covenant_morpher(
+ transfer_program_hash: bytes32,
+) -> Program:
+ """
+ A PARENT_MORPHER for use in the covenant layer that proves the parent is a singleton -> EML -> Covenant stack
+ """
+ first_curry: Program = EXTIGENT_METADATA_LAYER_COVENANT_MORPHER.curry(
+ COVENANT_LAYER_HASH,
+ EXTIGENT_METADATA_LAYER_HASH,
+ EML_TP_COVENANT_ADAPTER_HASH,
+ SINGLETON_MOD_HASH,
+ Program.to(SINGLETON_LAUNCHER_HASH).get_tree_hash(),
+ transfer_program_hash,
+ )
+ return first_curry.curry(first_curry.get_tree_hash())
+
+
+def construct_exigent_metadata_layer(
+ metadata: Optional[bytes32],
+ transfer_program: Program,
+ inner_puzzle: Program,
+) -> Program:
+ return EXTIGENT_METADATA_LAYER.curry(
+ EXTIGENT_METADATA_LAYER_HASH,
+ metadata,
+ transfer_program,
+ transfer_program.get_tree_hash(),
+ inner_puzzle,
+ )
+
+
+@streamable
+@dataclass(frozen=True)
+class VCLineageProof(LineageProof, Streamable):
+ """
+    The covenant layer for exigent metadata layers also requires the previous parent's metadata to be passed in
+ """
+
+ parent_proof_hash: Optional[bytes32] = None
+
+
+def solve_std_vc_backdoor(
+ launcher_id: bytes32,
+ metadata_hash: bytes32,
+ tp_hash: bytes32,
+ inner_puzzle_hash: bytes32,
+ amount: uint64,
+ eml_lineage_proof: VCLineageProof,
+ provider_innerpuzhash: bytes32,
+ coin_id: bytes32,
+ announcement_nonce: Optional[bytes32] = None,
+) -> Program:
+ """
+ Solution to the STANDARD_BRICK_PUZZLE above. Requires proof info about pretty much the whole puzzle stack.
+ """
+ solution: Program = Program.to(
+ [
+ launcher_id,
+ metadata_hash,
+ tp_hash,
+ STANDARD_BRICK_PUZZLE_HASH_HASH,
+ inner_puzzle_hash,
+ amount,
+ eml_lineage_proof.to_program(),
+ Program.to(eml_lineage_proof.parent_proof_hash),
+ announcement_nonce,
+ Program.to(
+ [
+ provider_innerpuzhash,
+ coin_id,
+ ]
+ ),
+ ]
+ )
+ return solution
+
+
+# Launching to a VC requires a OL with a transfer program that guarantees a () metadata on the next iteration
+# (mod (_ _ (provider tp)) (list (c provider ()) tp ()))
+# (c (c 19 ()) (c 43 (q ())))
+GUARANTEED_NIL_TP: Program = Program.fromhex("ff04ffff04ff13ff8080ffff04ff2bffff01ff80808080")
+OWNERSHIP_LAYER_LAUNCHER: Program = construct_exigent_metadata_layer(
+ None,
+ GUARANTEED_NIL_TP,
+ P2_ANNOUNCED_DELEGATED_PUZZLE,
+)
+OWNERSHIP_LAYER_LAUNCHER_HASH = OWNERSHIP_LAYER_LAUNCHER.get_tree_hash()
+
+
+########################
+# Verified Credentials #
+########################
+_T_VerifiedCredential = TypeVar("_T_VerifiedCredential", bound="VerifiedCredential")
+
+
+@streamable
+@dataclass(frozen=True)
+class VerifiedCredential(Streamable):
+ """
+ This class serves as the main driver for the entire VC puzzle stack. Given the information below, it can sync and
+ spend VerifiedCredentials in any specified manner. Trying to sync from a spend that this class did not create will
+ likely result in an error.
+ """
+
+ coin: Coin
+ singleton_lineage_proof: LineageProof
+ eml_lineage_proof: VCLineageProof
+ launcher_id: bytes32
+ inner_puzzle_hash: bytes32
+ proof_provider: bytes32
+ proof_hash: Optional[bytes32]
+
+ @classmethod
+ def launch(
+ cls: Type[_T_VerifiedCredential],
+ origin_coin: Coin,
+ provider_id: bytes32,
+ new_inner_puzzle_hash: bytes32,
+ memos: List[bytes32],
+ fee: uint64 = uint64(0),
+ ) -> Tuple[Program, List[CoinSpend], _T_VerifiedCredential]:
+ """
+ Launch a VC.
+
+ origin_coin: An XCH coin that will be used to fund the spend. A coin of any amount > 1 can be used and the
+ change will automatically go back to the coin's puzzle hash.
+        provider_id: The DID of the proof provider (the entity responsible for adding/removing proofs to the VC)
+ new_inner_puzzle_hash: the innermost puzzle hash once the VC is created
+ memos: The memos to use on the payment to the singleton
+
+ Returns a delegated puzzle to run (with any solution), a list of spends to push with the origin transaction,
+ and an instance of this class representing the expected state after all relevant spends have been pushed and
+ confirmed.
+ """
+ launcher_coin: Coin = generate_launcher_coin(origin_coin, uint64(1))
+
+ # Create the second puzzle for the first launch
+ curried_eve_singleton: Program = puzzle_for_singleton(
+ launcher_coin.name(),
+ OWNERSHIP_LAYER_LAUNCHER,
+ )
+ curried_eve_singleton_hash: bytes32 = curried_eve_singleton.get_tree_hash()
+ launcher_solution = Program.to([curried_eve_singleton_hash, uint64(1), None])
+
+ # Create the final puzzle for the second launch
+ inner_transfer_program: Program = create_did_tp()
+ transfer_program: Program = create_tp_covenant_adapter(
+ create_covenant_layer(
+ curried_eve_singleton_hash,
+ create_eml_covenant_morpher(
+ inner_transfer_program.get_tree_hash(),
+ ),
+ inner_transfer_program,
+ )
+ )
+ wrapped_inner_puzzle_hash: bytes32 = create_viral_backdoor(
+ STANDARD_BRICK_PUZZLE_HASH,
+ new_inner_puzzle_hash,
+ ).get_tree_hash()
+ metadata_layer_hash: bytes32 = construct_exigent_metadata_layer(
+ Program.to((provider_id, None)),
+ transfer_program,
+ wrapped_inner_puzzle_hash, # type: ignore
+ ).get_tree_hash_precalc(wrapped_inner_puzzle_hash)
+ curried_singleton_hash: bytes32 = puzzle_for_singleton(
+ launcher_coin.name(),
+ metadata_layer_hash, # type: ignore
+ ).get_tree_hash_precalc(metadata_layer_hash)
+ launch_dpuz: Program = Program.to(
+ (
+ 1,
+ [
+ [51, wrapped_inner_puzzle_hash, uint64(1), memos],
+ [1, new_inner_puzzle_hash],
+ [-10, provider_id, transfer_program.get_tree_hash()],
+ ],
+ )
+ )
+ second_launcher_solution = Program.to([launch_dpuz, None])
+ second_launcher_coin: Coin = Coin(
+ launcher_coin.name(),
+ curried_eve_singleton_hash,
+ uint64(1),
+ )
+ create_launcher_conditions = Program.to(
+ [
+ [51, SINGLETON_LAUNCHER_HASH, 1],
+ [51, origin_coin.puzzle_hash, origin_coin.amount - fee - 1],
+ [52, fee],
+ [61, std_hash(launcher_coin.name() + launcher_solution.get_tree_hash())],
+ [61, std_hash(second_launcher_coin.name() + launch_dpuz.get_tree_hash())],
+ ]
+ )
+
+ dpuz: Program = Program.to((1, create_launcher_conditions))
+ return (
+ dpuz,
+ [
+ CoinSpend(
+ launcher_coin,
+ SINGLETON_LAUNCHER,
+ launcher_solution,
+ ),
+ CoinSpend(
+ second_launcher_coin,
+ curried_eve_singleton,
+ solution_for_singleton(
+ LineageProof(parent_name=launcher_coin.parent_coin_info, amount=uint64(1)),
+ uint64(1),
+ Program.to(
+ [
+ second_launcher_solution,
+ ]
+ ),
+ ),
+ ),
+ ],
+ cls(
+ Coin(second_launcher_coin.name(), curried_singleton_hash, uint64(1)),
+ LineageProof(
+ parent_name=second_launcher_coin.parent_coin_info,
+ inner_puzzle_hash=OWNERSHIP_LAYER_LAUNCHER.get_tree_hash(),
+ amount=uint64(1),
+ ),
+ VCLineageProof(parent_name=second_launcher_coin.parent_coin_info, amount=uint64(1)),
+ launcher_coin.name(),
+ new_inner_puzzle_hash,
+ provider_id,
+ None,
+ ),
+ )
+
+ ####################################################################################################################
+ # The methods in this section give insight into the structure of the puzzle stack that is considered a "VC"
+ def construct_puzzle(self) -> Program:
+ return puzzle_for_singleton(
+ self.launcher_id,
+ self.construct_exigent_metadata_layer(),
+ )
+
+ def construct_exigent_metadata_layer(self) -> Program:
+ return construct_exigent_metadata_layer(
+ Program.to((self.proof_provider, self.proof_hash)),
+ self.construct_transfer_program(),
+ self.wrap_inner_with_backdoor(),
+ )
+
+ def construct_transfer_program(self) -> Program:
+ curried_eve_singleton_hash: bytes32 = puzzle_for_singleton(
+ self.launcher_id,
+ OWNERSHIP_LAYER_LAUNCHER,
+ ).get_tree_hash()
+ inner_transfer_program: Program = create_did_tp()
+
+ return create_tp_covenant_adapter(
+ create_covenant_layer(
+ curried_eve_singleton_hash,
+ create_eml_covenant_morpher(
+ inner_transfer_program.get_tree_hash(),
+ ),
+ inner_transfer_program,
+ ),
+ )
+
+ def wrap_inner_with_backdoor(self) -> Program:
+ return create_viral_backdoor(
+ self.hidden_puzzle().get_tree_hash(),
+ self.inner_puzzle_hash,
+ )
+
+ def hidden_puzzle(self) -> Program:
+ return STANDARD_BRICK_PUZZLE
+
+ ####################################################################################################################
+
+ @staticmethod
+ def is_vc(puzzle_reveal: UncurriedPuzzle) -> Tuple[bool, str]:
+ """
+        This takes an (uncurried) puzzle reveal and returns a boolean for whether the puzzle is a VC, along with an
+        error message explaining the mismatch if it is not. Returns True for VC launcher spends.
+ """
+ if puzzle_reveal.mod != SINGLETON_MOD:
+ return False, "top most layer is not a singleton"
+ layer_below_singleton: UncurriedPuzzle = uncurry_puzzle(puzzle_reveal.args.at("rf"))
+ if layer_below_singleton.mod != EXTIGENT_METADATA_LAYER:
+ return False, "layer below singleton is not an exigent metadata layer"
+
+ # Need to validate both transfer program...
+ full_transfer_program_as_prog: Program = layer_below_singleton.args.at("rrf")
+ full_transfer_program: UncurriedPuzzle = uncurry_puzzle(full_transfer_program_as_prog)
+ if full_transfer_program.mod != EML_TP_COVENANT_ADAPTER:
+ # This is the first spot we'll run into trouble if we're examining a VC being launched
+ # Break off to that logic here
+ if full_transfer_program_as_prog == GUARANTEED_NIL_TP:
+ if layer_below_singleton.args.at("rrrrf") != P2_ANNOUNCED_DELEGATED_PUZZLE:
+ return (
+ False,
+ "tp indicates VC is launching, but it does not have the correct inner puzzle",
+ ) # pragma: no cover
+ else:
+ return True, ""
+ else:
+ return False, "top layer of transfer program is not a covenant layer adapter" # pragma: no cover
+ adapted_transfer_program: UncurriedPuzzle = uncurry_puzzle(full_transfer_program.args.at("f"))
+ if adapted_transfer_program.mod != COVENANT_LAYER:
+ return (
+ False,
+ "transfer program is adapted to covenant layer, but covenant layer did not follow",
+ ) # pragma: no cover
+ morpher: UncurriedPuzzle = uncurry_puzzle(adapted_transfer_program.args.at("rf"))
+ if uncurry_puzzle(morpher.mod).mod != EXTIGENT_METADATA_LAYER_COVENANT_MORPHER:
+ return (
+ False,
+ "covenant for exigent metadata layer does not match the one expected for VCs",
+ ) # pragma: no cover
+ if uncurry_puzzle(adapted_transfer_program.args.at("rrf")).mod != EML_DID_TP:
+ return (
+ False,
+ "transfer program for exigent metadata layer was not the standard VC transfer program",
+ ) # pragma: no cover
+
+ # ...and layer below EML
+ layer_below_eml: UncurriedPuzzle = uncurry_puzzle(layer_below_singleton.args.at("rrrrf"))
+ if layer_below_eml.mod != VIRAL_BACKDOOR:
+ return False, "VC did not have a provider backdoor" # pragma: no cover
+ hidden_puzzle_hash: bytes32 = layer_below_eml.args.at("rf")
+ if hidden_puzzle_hash != STANDARD_BRICK_PUZZLE_HASH:
+ return (
+ False,
+ "VC did not have the standard method to brick in its backdoor hidden puzzle slot",
+ ) # pragma: no cover
+
+ return True, ""
+
+ @classmethod
+ def get_next_from_coin_spend(cls: Type[_T_VerifiedCredential], parent_spend: CoinSpend) -> _T_VerifiedCredential:
+ """
+        Given a coin spend, this will return the next VC that was created as an output of that spend. This is the main
+ method to use when syncing. If a spend has been identified as having a VC puzzle reveal, running this method
+ on that spend should succeed unless the spend in question was the result of a provider using the backdoor to
+ revoke the credential.
+ """
+ coin: Coin = next(c for c in compute_additions(parent_spend) if c.amount % 2 == 1)
+
+        # Parse the parent spend's puzzle and solution to reconstruct the child's lineage proofs and latest metadata
+ parent_coin: Coin = parent_spend.coin
+ puzzle: Program = parent_spend.puzzle_reveal.to_program()
+ solution: Program = parent_spend.solution.to_program()
+
+ singleton: UncurriedPuzzle = uncurry_puzzle(puzzle)
+ launcher_id: bytes32 = bytes32(singleton.args.at("frf").atom)
+ layer_below_singleton: Program = singleton.args.at("rf")
+ singleton_lineage_proof: LineageProof = LineageProof(
+ parent_name=parent_coin.parent_coin_info,
+ inner_puzzle_hash=layer_below_singleton.get_tree_hash(),
+ amount=uint64(parent_coin.amount),
+ )
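+        # If the parent's inner puzzle was the ownership-layer launcher, this is the eve spend: read the new inner
+        # puzzle hash and provider from the announced delegated puzzle's conditions rather than a normal inner solution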
+ if layer_below_singleton == OWNERSHIP_LAYER_LAUNCHER:
+ proof_hash: Optional[bytes32] = None
+ eml_lineage_proof: VCLineageProof = VCLineageProof(
+ parent_name=parent_coin.parent_coin_info, amount=uint64(parent_coin.amount)
+ )
+ # See what conditions were output by the launcher dpuz and dsol
+ dpuz: Program = solution.at("rrf").at("f").at("f")
+ dsol: Program = solution.at("rrf").at("f").at("rf")
+
+ conditions: Iterator[Program] = dpuz.run(dsol).as_iter()
+ remark_condition: Program = next(c for c in conditions if c.at("f").as_int() == 1)
+ inner_puzzle_hash = bytes32(remark_condition.at("rf").atom)
+ magic_condition: Program = next(c for c in conditions if c.at("f").as_int() == -10)
+ proof_provider = bytes32(magic_condition.at("rf").atom)
+ else:
+ metadata_layer: UncurriedPuzzle = uncurry_puzzle(layer_below_singleton)
+
+ # Dig to find the inner puzzle / inner solution and extract next inner puzhash and proof hash
+ inner_puzzle: Program = solution.at("rrf").at("f").at("rf")
+ inner_solution: Program = solution.at("rrf").at("f").at("rrf")
+ conditions = inner_puzzle.run(inner_solution).as_iter()
+ new_singleton_condition: Program = next(
+ c for c in conditions if c.at("f").as_int() == 51 and c.at("rrf").as_int() % 2 != 0
+ )
+ inner_puzzle_hash = bytes32(new_singleton_condition.at("rf").atom)
+ magic_condition = next(c for c in conditions if c.at("f").as_int() == -10)
+ if magic_condition.at("rrrf") == Program.to(None):
+ proof_hash_as_prog: Program = metadata_layer.args.at("rfr")
+ elif magic_condition.at("rrrf").atom is not None:
+ raise ValueError("Specified VC was cleared")
+ else:
+ proof_hash_as_prog = magic_condition.at("rrrfrrf")
+
+ proof_hash = None if proof_hash_as_prog == Program.to(None) else bytes32(proof_hash_as_prog.atom)
+
+ proof_provider = bytes32(metadata_layer.args.at("rff").atom)
+
+ parent_proof_hash: bytes32 = metadata_layer.args.at("rf").get_tree_hash()
+ eml_lineage_proof = VCLineageProof(
+ parent_name=parent_coin.parent_coin_info,
+ inner_puzzle_hash=create_viral_backdoor(
+ STANDARD_BRICK_PUZZLE_HASH,
+ inner_puzzle_hash,
+ ).get_tree_hash(),
+ amount=uint64(parent_coin.amount),
+ parent_proof_hash=None if parent_proof_hash == Program.to(None) else parent_proof_hash,
+ )
+
+ new_vc: _T_VerifiedCredential = cls(
+ coin,
+ singleton_lineage_proof,
+ eml_lineage_proof,
+ launcher_id,
+ inner_puzzle_hash,
+ proof_provider,
+ proof_hash,
+ )
+ if new_vc.construct_puzzle().get_tree_hash() != new_vc.coin.puzzle_hash:
+ raise ValueError("Error getting new VC from coin spend, probably the child singleton is not a VC")
+
+ return new_vc
+
+ ####################################################################################################################
+ # The methods in this section are useful for spending an existing VC
+ def magic_condition_for_new_proofs(
+ self,
+ new_proof_hash: Optional[bytes32],
+ provider_innerpuzhash: bytes32,
+ new_proof_provider: Optional[bytes32] = None,
+ ) -> Program:
+ """
+ Returns the 'magic' condition that can update the metadata with a new proof hash. Returning this condition from
+ the inner puzzle will require a corresponding announcement from the provider DID authorizing that proof hash
+ change.
+ """
+ magic_condition: Program = Program.to(
+ [
+ -10,
+ self.eml_lineage_proof.to_program(),
+ [
+ Program.to(self.eml_lineage_proof.parent_proof_hash),
+ self.launcher_id,
+ ],
+ [
+ provider_innerpuzhash,
+ self.coin.name(),
+ Program.to(new_proof_hash),
+ None, # TP update is not allowed because then the singleton will leave the VC protocol
+ ],
+ ]
+ )
+ return magic_condition
+
+ def standard_magic_condition(self) -> Program:
+ """
+ Returns the standard magic condition that needs to be returned to the metadata layer. Returning this condition
+ from the inner puzzle will leave the proof hash and transfer program the same.
+ """
+ magic_condition: Program = Program.to(
+ [
+ -10,
+ self.eml_lineage_proof.to_program(),
+ [
+ Program.to(self.eml_lineage_proof.parent_proof_hash),
+ self.launcher_id,
+ ],
+ None,
+ ]
+ )
+ return magic_condition
+
+ def do_spend(
+ self,
+ inner_puzzle: Program,
+ inner_solution: Program,
+ new_proof_hash: Optional[bytes32] = None,
+ new_proof_provider: Optional[bytes32] = None,
+ ) -> Tuple[Optional[bytes32], CoinSpend, "VerifiedCredential"]:
+ """
+ Given an inner puzzle reveal and solution, spend the VC (potentially updating the proofs in the process).
+        Note that the inner puzzle is already expected to output the 'magic' condition (created via the methods above).
+
+ Returns potentially the puzzle announcement the spend will expect from the provider DID, the spend of the VC,
+ and the expected class representation of the new VC after the spend is pushed and confirmed.
+ """
+ vc_solution: Program = solution_for_singleton(
+ self.singleton_lineage_proof,
+ uint64(self.coin.amount),
+ Program.to(
+ [ # solve EML
+ solve_viral_backdoor(
+ inner_puzzle,
+ inner_solution,
+ ),
+ ]
+ ),
+ )
+
+ if new_proof_hash is not None:
+ expected_announcement: Optional[bytes32] = std_hash(
+ self.coin.name()
+ + Program.to(new_proof_hash).get_tree_hash()
+ + b"" # TP update is banned because singleton will leave the VC protocol
+ )
+ else:
+ expected_announcement = None
+
+ new_singleton_condition: Program = next(
+ c for c in inner_puzzle.run(inner_solution).as_iter() if c.at("f") == 51 and c.at("rrf").as_int() % 2 != 0
+ )
+ new_inner_puzzle_hash: bytes32 = bytes32(new_singleton_condition.at("rf").atom)
+
+ return (
+ expected_announcement,
+ CoinSpend(
+ self.coin,
+ self.construct_puzzle(),
+ vc_solution,
+ ),
+ self._next_vc(
+ new_inner_puzzle_hash,
+ self.proof_hash if new_proof_hash is None else new_proof_hash,
+ uint64(new_singleton_condition.at("rrf").as_int()),
+ ),
+ )
+
+ def activate_backdoor(
+ self, provider_innerpuzhash: bytes32, announcement_nonce: Optional[bytes32] = None
+ ) -> Tuple[bytes32, CoinSpend]:
+ """
+ Activates the backdoor in the VC to revoke the credentials and remove the provider's DID.
+
+ Returns the announcement we expect from the provider's DID authorizing this, and the spend of the VC.
+ Sync attempts by this class on spends generated by this method are expected to fail. This could be improved in
+        the future with a separate type/state for a VC that has been revoked but is still perfectly usable as a singleton.
+ """
+ vc_solution: Program = solution_for_singleton(
+ self.singleton_lineage_proof,
+ uint64(self.coin.amount),
+ Program.to(
+ [ # solve EML
+ solve_viral_backdoor(
+ self.hidden_puzzle(),
+ solve_std_vc_backdoor(
+ self.launcher_id,
+ Program.to((self.proof_provider, self.proof_hash)).get_tree_hash(),
+ self.construct_transfer_program().get_tree_hash(),
+ self.inner_puzzle_hash,
+ uint64(self.coin.amount),
+ self.eml_lineage_proof,
+ provider_innerpuzhash,
+ self.coin.name(),
+ announcement_nonce,
+ ),
+ hidden=True,
+ ),
+ ]
+ ),
+ )
+
+ expected_announcement: bytes32 = std_hash(
+ self.coin.name() + Program.to(None).get_tree_hash() + ACS_TRANSFER_PROGRAM.get_tree_hash()
+ )
+
+ return (
+ expected_announcement,
+ CoinSpend(self.coin, self.construct_puzzle(), vc_solution),
+ )
+
+ ####################################################################################################################
+
+ def _next_vc(
+ self, next_inner_puzzle_hash: bytes32, new_proof_hash: Optional[bytes32], next_amount: uint64
+ ) -> "VerifiedCredential":
+ """
+ Private method that creates the next VC class instance.
+ """
+ slightly_incomplete_vc: VerifiedCredential = VerifiedCredential(
+ Coin(self.coin.name(), bytes32([0] * 32), next_amount),
+ LineageProof(
+ self.coin.parent_coin_info,
+ self.construct_exigent_metadata_layer().get_tree_hash(),
+ uint64(self.coin.amount),
+ ),
+ VCLineageProof(
+ self.coin.parent_coin_info,
+ self.wrap_inner_with_backdoor().get_tree_hash(),
+ uint64(self.coin.amount),
+ Program.to((self.proof_provider, self.proof_hash)).get_tree_hash(),
+ ),
+ self.launcher_id,
+ next_inner_puzzle_hash,
+ self.proof_provider,
+ new_proof_hash,
+ )
+
+ return replace(
+ slightly_incomplete_vc,
+ coin=Coin(
+ slightly_incomplete_vc.coin.parent_coin_info,
+ slightly_incomplete_vc.construct_puzzle().get_tree_hash(),
+ slightly_incomplete_vc.coin.amount,
+ ),
+ )
diff --git a/chia/wallet/vc_wallet/vc_puzzles/__init__.py b/chia/wallet/vc_wallet/vc_puzzles/__init__.py
new file mode 100644
diff --git a/chia/wallet/vc_wallet/vc_store.py b/chia/wallet/vc_wallet/vc_store.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/vc_wallet/vc_store.py
@@ -0,0 +1,239 @@
+from __future__ import annotations
+
+import dataclasses
+from functools import cmp_to_key
+from typing import Dict, List, Optional, Tuple, Type, TypeVar
+
+from aiosqlite import Row
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.db_wrapper import DBWrapper2
+from chia.util.ints import uint32, uint64
+from chia.util.streamable import Streamable, streamable
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.util.merkle_utils import list_to_binary_tree
+from chia.wallet.vc_wallet.vc_drivers import VCLineageProof, VerifiedCredential
+
+
+@dataclasses.dataclass(frozen=True)
+class VCProofs:
+ key_value_pairs: Dict[str, str]
+
+ def as_program(self) -> Program:
+ def byte_sort_pairs(f1: Tuple[str, str], f2: Tuple[str, str]) -> int:
+ return 1 if Program.to([10, (1, f1[0]), (1, f2[0])]).run([]) == Program.to(None) else -1
+
+ prog: Program = Program.to(
+ list_to_binary_tree(
+ list(
+ sorted(
+ self.key_value_pairs.items(),
+ key=cmp_to_key(byte_sort_pairs),
+ )
+ )
+ )
+ )
+ return prog
+
+ def root(self) -> bytes32:
+ return self.as_program().get_tree_hash()
+
+ @staticmethod
+ def from_program(prog: Program) -> VCProofs:
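+        # Proofs are stored as a binary tree of (key . value) string pairs: recurse on pair nodes, decode at leaves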
+ first: Program = prog.at("f")
+ rest: Program = prog.at("r")
+ if first.atom is None and rest.atom is None:
+ final_dict: Dict[str, str] = {}
+ final_dict.update(VCProofs.from_program(first).key_value_pairs)
+ final_dict.update(VCProofs.from_program(rest).key_value_pairs)
+ return VCProofs(final_dict)
+ elif first.atom is not None and rest.atom is not None:
+ return VCProofs({first.atom.decode("utf-8"): rest.atom.decode("utf-8")})
+ else:
+ raise ValueError("Malformatted VCProofs program") # pragma: no cover
+
+
+_T_VCStore = TypeVar("_T_VCStore", bound="VCStore")
+
+
+@streamable
+@dataclasses.dataclass(frozen=True)
+class VCRecord(Streamable):
+ vc: VerifiedCredential
+ confirmed_at_height: uint32 # 0 == pending confirmation
+
+
+def _row_to_vc_record(row: Row) -> VCRecord:
+ return VCRecord(
+ VerifiedCredential(
+ Coin(bytes32.from_hexstr(row[2]), bytes32.from_hexstr(row[3]), uint64.from_bytes(row[4])),
+ LineageProof.from_bytes(row[5]),
+ VCLineageProof.from_bytes(row[6]),
+ bytes32.from_hexstr(row[0]),
+ bytes32.from_hexstr(row[7]),
+ bytes32.from_hexstr(row[8]),
+ None if row[9] is None else bytes32.from_hexstr(row[9]),
+ ),
+ uint32(row[10]),
+ )
+
+
+class VCStore:
+ """
+    VCStore keeps track of all verified credential records and their associated proofs
+ """
+
+ db_wrapper: DBWrapper2
+
+ @classmethod
+ async def create(cls: Type[_T_VCStore], db_wrapper: DBWrapper2) -> _T_VCStore:
+ self = cls()
+
+ self.db_wrapper = db_wrapper
+
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ await conn.execute(
+ (
+ "CREATE TABLE IF NOT EXISTS vc_records("
+ # VerifiedCredential.launcher_id
+ " launcher_id text PRIMARY KEY,"
+ # VerifiedCredential.coin
+ " coin_id text,"
+ " parent_coin_info text,"
+ " puzzle_hash text,"
+ " amount blob,"
+ # VerifiedCredential.singleton_lineage_proof
+ " singleton_lineage_proof blob,"
+                    # VerifiedCredential.eml_lineage_proof
+ " ownership_lineage_proof blob,"
+ # VerifiedCredential.inner_puzzle_hash
+ " inner_puzzle_hash text,"
+ # VerifiedCredential.proof_provider
+ " proof_provider text,"
+ # VerifiedCredential.proof_hash
+ " proof_hash text,"
+                    # VCRecord.confirmed_at_height
+ " confirmed_height int)"
+ )
+ )
+
+ await conn.execute("CREATE INDEX IF NOT EXISTS coin_id_index ON vc_records(coin_id)")
+ await conn.execute("CREATE INDEX IF NOT EXISTS proof_provider_index ON vc_records(proof_provider)")
+
+ await conn.execute("CREATE TABLE IF NOT EXISTS vc_proofs(root text PRIMARY KEY, proofs blob)")
+
+ return self
+
+ async def _clear_database(self) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn: # pragma: no cover
+ await (await conn.execute("DELETE FROM vc_records")).close()
+
+ async def add_or_replace_vc_record(self, record: VCRecord) -> None:
+ """
+ Store VCRecord in DB.
+
+        If a record with the same launcher ID already exists, it is replaced (the insert uses INSERT OR REPLACE keyed
+        on the launcher ID).
+ """
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ await conn.execute(
+ "INSERT or REPLACE INTO vc_records VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ (
+ record.vc.launcher_id.hex(),
+ record.vc.coin.name().hex(),
+ record.vc.coin.parent_coin_info.hex(),
+ record.vc.coin.puzzle_hash.hex(),
+ bytes(uint64(record.vc.coin.amount)),
+ bytes(record.vc.singleton_lineage_proof),
+ bytes(record.vc.eml_lineage_proof),
+ record.vc.inner_puzzle_hash.hex(),
+ record.vc.proof_provider.hex(),
+ None if record.vc.proof_hash is None else record.vc.proof_hash.hex(),
+ record.confirmed_at_height,
+ ),
+ )
+
+ async def get_vc_record(self, launcher_id: bytes32) -> Optional[VCRecord]:
+ """
+ Checks DB for VC with specified launcher_id and returns it.
+ """
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ cursor = await conn.execute("SELECT * from vc_records WHERE launcher_id=?", (launcher_id.hex(),))
+ row = await cursor.fetchone()
+ await cursor.close()
+ if row is not None:
+ return _row_to_vc_record(row)
+ return None
+
+ # Coverage coming with CR-CAT Wallet
+ async def get_vc_records_by_providers(self, provider_ids: List[bytes32]) -> List[VCRecord]: # pragma: no cover
+ """
+ Checks DB for VCs with a proof_provider in a specified list and returns them.
+ """
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ providers_param: str = ",".join(["?"] * len(provider_ids))
+ cursor = await conn.execute(
+ f"SELECT * from vc_records WHERE proof_provider IN {providers_param} LIMIT 1000", provider_ids
+ )
+ rows = await cursor.fetchall()
+ await cursor.close()
+
+ return [_row_to_vc_record(row) for row in rows]
+
+ async def get_unconfirmed_vcs(self) -> List[VCRecord]:
+ """
+ Returns all VCs that have not yet been marked confirmed (confirmed_height == 0)
+ """
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ cursor = await conn.execute("SELECT * from vc_records WHERE confirmed_height=0 LIMIT 1000")
+ rows = await cursor.fetchall()
+ await cursor.close()
+ records = [_row_to_vc_record(row) for row in rows]
+
+ return records
+
+ async def get_vc_record_list(
+ self,
+ start_index: int = 0,
+ count: int = 50,
+ ) -> List[VCRecord]:
+ """
+ Return all VCs
+ :param start_index: Start index
+ :param count: How many records will be returned
+ :return:
+ """
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = list(await conn.execute_fetchall("SELECT * from vc_records LIMIT ? OFFSET ? ", (count, start_index)))
+ return [_row_to_vc_record(row) for row in rows]
+
+ async def delete_vc_record(self, launcher_id: bytes32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ await (await conn.execute("DELETE FROM vc_records WHERE launcher_id=?", (launcher_id.hex(),))).close()
+
+ async def get_vc_record_by_coin_id(self, coin_id: bytes32) -> Optional[VCRecord]:
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ cursor = await conn.execute("SELECT * from vc_records WHERE coin_id=? LIMIT 1000", (coin_id.hex(),))
+ row = await cursor.fetchone()
+ await cursor.close()
+ if row is not None:
+ return _row_to_vc_record(row)
+ return None
+
+ async def add_vc_proofs(self, vc_proofs: VCProofs) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ await conn.execute(
+ "INSERT INTO vc_proofs VALUES(?, ?)", (vc_proofs.root().hex(), bytes(vc_proofs.as_program()))
+ )
+
+ async def get_proofs_for_root(self, root: bytes32) -> Optional[VCProofs]:
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ cursor = await conn.execute("SELECT proofs FROM vc_proofs WHERE root=?", (root.hex(),))
+ row = await cursor.fetchone()
+ await cursor.close()
+ if row is None:
+ return None # pragma: no cover
+ else:
+ return VCProofs.from_program(Program.from_bytes(row[0]))
diff --git a/chia/wallet/vc_wallet/vc_wallet.py b/chia/wallet/vc_wallet/vc_wallet.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/vc_wallet/vc_wallet.py
@@ -0,0 +1,452 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import time
+from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Type, TypeVar, Union
+
+from blspy import G1Element, G2Element
+
+from chia.protocols.wallet_protocol import CoinState
+from chia.server.ws_connection import WSChiaConnection
+from chia.types.announcement import Announcement
+from chia.types.blockchain_format.coin import Coin, coin_as_list
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.spend_bundle import SpendBundle
+from chia.util.ints import uint32, uint64, uint128
+from chia.wallet.did_wallet.did_wallet import DIDWallet
+from chia.wallet.payment import Payment
+from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import solution_for_conditions
+from chia.wallet.sign_coin_spends import sign_coin_spends
+from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.compute_memos import compute_memos
+from chia.wallet.util.transaction_type import TransactionType
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend_for_coin_state
+from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.vc_wallet.vc_drivers import VerifiedCredential
+from chia.wallet.vc_wallet.vc_store import VCRecord, VCStore
+from chia.wallet.wallet import Wallet
+from chia.wallet.wallet_coin_record import WalletCoinRecord
+from chia.wallet.wallet_info import WalletInfo
+
+if TYPE_CHECKING:
+ from chia.wallet.wallet_state_manager import WalletStateManager # pragma: no cover
+
+_T_VCWallet = TypeVar("_T_VCWallet", bound="VCWallet")
+
+
+class VCWallet:
+ wallet_state_manager: WalletStateManager
+ log: logging.Logger
+ standard_wallet: Wallet
+ wallet_info: WalletInfo
+ store: VCStore
+
+ @classmethod
+ async def create_new_vc_wallet(
+ cls: Type[_T_VCWallet],
+ wallet_state_manager: WalletStateManager,
+ wallet: Wallet,
+ name: Optional[str] = None,
+ ) -> _T_VCWallet:
+ name = "VCWallet" if name is None else name
+ new_wallet: _T_VCWallet = await cls.create(
+ wallet_state_manager,
+ wallet,
+ await wallet_state_manager.user_store.create_wallet(name, uint32(WalletType.VC.value), ""),
+ name,
+ )
+ await wallet_state_manager.add_new_wallet(new_wallet)
+ return new_wallet
+
+ @classmethod
+ async def create(
+ cls: Type[_T_VCWallet],
+ wallet_state_manager: WalletStateManager,
+ wallet: Wallet,
+ wallet_info: WalletInfo,
+ name: Optional[str] = None,
+ ) -> _T_VCWallet:
+ self = cls()
+ self.wallet_state_manager = wallet_state_manager
+ self.standard_wallet = wallet
+ self.log = logging.getLogger(name if name else wallet_info.name)
+ self.wallet_info = wallet_info
+ self.store = wallet_state_manager.vc_store
+ return self
+
+ @classmethod
+ def type(cls) -> WalletType:
+ return WalletType.VC
+
+ def id(self) -> uint32:
+ return self.wallet_info.id
+
+ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection) -> None:
+ """
+        An unspent coin has arrived in our wallet. Get the parent spend to construct the current VerifiedCredential
+ representation of the coin and add it to the DB if it's the newest version of the singleton.
+ """
+ wallet_node = self.wallet_state_manager.wallet_node
+ coin_states: Optional[List[CoinState]] = await wallet_node.get_coin_state([coin.parent_coin_info], peer=peer)
+ if coin_states is None:
+ self.log.error(
+ f"Cannot find parent coin of the verified credential coin: {coin.name().hex()}"
+ ) # pragma: no cover
+ return # pragma: no cover
+ parent_coin_state = coin_states[0]
+ cs = await fetch_coin_spend_for_coin_state(parent_coin_state, peer)
+ if cs is None:
+ self.log.error(
+ f"Cannot get verified credential coin: {coin.name().hex()} puzzle and solution"
+ ) # pragma: no cover
+ return # pragma: no cover
+ vc = VerifiedCredential.get_next_from_coin_spend(cs)
+ vc_record: VCRecord = VCRecord(vc, height)
+ await self.store.add_or_replace_vc_record(vc_record)
+
+ async def remove_coin(self, coin: Coin, height: uint32) -> None:
+ """
+        Remove the VC record if its coin is spent (e.g. the VC is transferred to another key).
+        :param coin: the spent VC coin
+        :param height: the height at which the coin was spent
+        :return:
+ """
+ vc_record: Optional[VCRecord] = await self.store.get_vc_record_by_coin_id(coin.name())
+ if vc_record is not None:
+ await self.store.delete_vc_record(vc_record.vc.launcher_id)
+
+ async def get_vc_record_for_launcher_id(self, launcher_id: bytes32) -> VCRecord:
+ """
+        Go into the store and get the VC record representing the latest state of the VC that we have on chain.
+ """
+ vc_record = await self.store.get_vc_record(launcher_id)
+ if vc_record is None:
+ raise ValueError(f"Verified credential {launcher_id.hex()} doesn't exist.") # pragma: no cover
+ return vc_record
+
+ async def launch_new_vc(
+ self,
+ provider_did: bytes32,
+ inner_puzzle_hash: Optional[bytes32] = None,
+ fee: uint64 = uint64(0),
+ ) -> Tuple[VCRecord, List[TransactionRecord]]:
+ """
+ Given the DID ID of a proof provider, mint a brand new VC with an empty slot for proofs.
+
+ Returns the tx records associated with the transaction as well as the expected unconfirmed VCRecord.
+ """
+ # Check if we own the DID
+ found_did = False
+ for _, wallet in self.wallet_state_manager.wallets.items():
+ if wallet.type() == WalletType.DECENTRALIZED_ID:
+ assert isinstance(wallet, DIDWallet)
+ if bytes32.fromhex(wallet.get_my_DID()) == provider_did:
+ found_did = True
+ break
+ if not found_did:
+ raise ValueError(f"You don't own the DID {provider_did.hex()}") # pragma: no cover
+ # Mint VC
+ coins = await self.standard_wallet.select_coins(uint64(1 + fee), min_coin_amount=uint64(1 + fee))
+ if len(coins) == 0:
+ raise ValueError("Cannot find a coin to mint the verified credential.") # pragma: no cover
+ if inner_puzzle_hash is None:
+ inner_puzzle_hash = await self.standard_wallet.get_puzzle_hash(new=False) # pragma: no cover
+ original_coin = coins.pop()
+ dpuz, coin_spends, vc = VerifiedCredential.launch(
+ original_coin,
+ provider_did,
+ inner_puzzle_hash,
+ [inner_puzzle_hash],
+ fee=fee,
+ )
+ solution = solution_for_conditions(dpuz.rest())
+ original_puzzle = await self.standard_wallet.puzzle_for_puzzle_hash(original_coin.puzzle_hash)
+ coin_spends.append(CoinSpend(original_coin, original_puzzle, solution))
+ spend_bundle = await sign_coin_spends(
+ coin_spends,
+ self.standard_wallet.secret_key_store.secret_key_for_public_key,
+ self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA,
+ self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
+ )
+ now = uint64(int(time.time()))
+ add_list: List[Coin] = list(spend_bundle.additions())
+ rem_list: List[Coin] = list(spend_bundle.removals())
+ vc_record: VCRecord = VCRecord(vc, uint32(0))
+ tx = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=now,
+ to_puzzle_hash=inner_puzzle_hash,
+ amount=uint64(1),
+ fee_amount=uint64(fee),
+ confirmed=False,
+ sent=uint32(0),
+ spend_bundle=spend_bundle,
+ additions=add_list,
+ removals=rem_list,
+ wallet_id=uint32(1),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.OUTGOING_TX.value),
+ name=spend_bundle.name(),
+ memos=list(compute_memos(spend_bundle).items()),
+ )
+
+ return vc_record, [tx]
+
+ async def generate_signed_transaction(
+ self,
+ vc_id: bytes32,
+ fee: uint64 = uint64(0),
+ new_inner_puzhash: Optional[bytes32] = None,
+ coin_announcements: Optional[Set[bytes]] = None,
+ puzzle_announcements: Optional[Set[bytes]] = None,
+ coin_announcements_to_consume: Optional[Set[Announcement]] = None,
+ puzzle_announcements_to_consume: Optional[Set[Announcement]] = None,
+        new_proof_hash: Optional[bytes32] = None,  # Requires that this key possesses the DID to update the specified VC
+ provider_inner_puzhash: Optional[bytes32] = None,
+ reuse_puzhash: Optional[bool] = None,
+ ) -> List[TransactionRecord]:
+ """
+ Entry point for two standard actions:
+ - Cycle the singleton and make an announcement authorizing something
+ - Update the hash of the proofs contained within the VC (new_proof_hash is not None)
+
+        Returns 1 - 3 TransactionRecord objects depending on whether or not there's a fee and whether or not there's
+ a DID announcement involved.
+ """
+ # Find verified credential
+ vc_record = await self.get_vc_record_for_launcher_id(vc_id)
+ if vc_record.confirmed_at_height == 0:
+ raise ValueError(
+ f"Verified credential {vc_id.hex()} is not confirmed, please try again later."
+ ) # pragma: no cover
+ inner_puzhash: bytes32 = vc_record.vc.inner_puzzle_hash
+ inner_puzzle: Program = await self.standard_wallet.puzzle_for_puzzle_hash(inner_puzhash)
+ if new_inner_puzhash is None:
+ new_inner_puzhash = inner_puzhash
+ if coin_announcements_to_consume is not None:
+ coin_announcements_bytes: Optional[Set[bytes32]] = {
+ a.name() for a in coin_announcements_to_consume
+ } # pragma: no cover
+ else:
+ coin_announcements_bytes = None
+
+ if puzzle_announcements_to_consume is not None:
+ puzzle_announcements_bytes: Optional[Set[bytes32]] = {
+ a.name() for a in puzzle_announcements_to_consume
+ } # pragma: no cover
+ else:
+ puzzle_announcements_bytes = None
+
+ primaries: List[Payment] = [Payment(new_inner_puzhash, uint64(vc_record.vc.coin.amount), [new_inner_puzhash])]
+
+ if fee > 0:
+ announcement_to_make = vc_record.vc.coin.name()
+ chia_tx = await self.wallet_state_manager.main_wallet.create_tandem_xch_tx(
+ fee, Announcement(vc_record.vc.coin.name(), announcement_to_make), reuse_puzhash=reuse_puzhash
+ )
+ if coin_announcements is None:
+ coin_announcements = set((announcement_to_make,))
+ else:
+ coin_announcements.add(announcement_to_make) # pragma: no cover
+ else:
+ chia_tx = None
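+        # Updating the proof hash requires an authorizing announcement from the provider DID, so locate the matching
+        # DID wallet's inner puzzle hash unless one was supplied explicitly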
+ if new_proof_hash is not None:
+ if provider_inner_puzhash is None:
+ for _, wallet in self.wallet_state_manager.wallets.items():
+ if wallet.type() == WalletType.DECENTRALIZED_ID:
+ assert isinstance(wallet, DIDWallet)
+ if wallet.did_info.current_inner is not None and wallet.did_info.origin_coin is not None:
+ if vc_record.vc.proof_provider == wallet.did_info.origin_coin.name():
+ provider_inner_puzhash = wallet.did_info.current_inner.get_tree_hash()
+ break
+ else:
+ continue # pragma: no cover
+ else:
+ raise ValueError("VC could not be updated with specified DID info") # pragma: no cover
+ magic_condition = vc_record.vc.magic_condition_for_new_proofs(new_proof_hash, provider_inner_puzhash)
+ else:
+ magic_condition = vc_record.vc.standard_magic_condition()
+ innersol: Program = self.standard_wallet.make_solution(
+ primaries=primaries,
+ coin_announcements=coin_announcements,
+ puzzle_announcements=puzzle_announcements,
+ coin_announcements_to_assert=coin_announcements_bytes,
+ puzzle_announcements_to_assert=puzzle_announcements_bytes,
+ magic_conditions=[magic_condition],
+ )
+ did_announcement, coin_spend, vc = vc_record.vc.do_spend(inner_puzzle, innersol, new_proof_hash)
+ spend_bundles = [
+ await sign_coin_spends(
+ [coin_spend],
+ self.standard_wallet.secret_key_store.secret_key_for_public_key,
+ self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA,
+ self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
+ )
+ ]
+ if did_announcement is not None:
+ # Need to spend DID
+ for _, wallet in self.wallet_state_manager.wallets.items():
+ if wallet.type() == WalletType.DECENTRALIZED_ID:
+ assert isinstance(wallet, DIDWallet)
+ if bytes32.fromhex(wallet.get_my_DID()) == vc_record.vc.proof_provider:
+ self.log.debug("Creating announcement from DID for vc: %s", vc_id.hex())
+ did_bundle = await wallet.create_message_spend(puzzle_announcements={bytes(did_announcement)})
+ spend_bundles.append(did_bundle)
+ break
+ else:
+ raise ValueError(
+ f"Cannot find the required DID {vc_record.vc.proof_provider.hex()}."
+ ) # pragma: no cover
+ tx_list: List[TransactionRecord] = []
+ if chia_tx is not None and chia_tx.spend_bundle is not None:
+ spend_bundles.append(chia_tx.spend_bundle)
+ tx_list.append(dataclasses.replace(chia_tx, spend_bundle=None))
+ spend_bundle = SpendBundle.aggregate(spend_bundles)
+ now = uint64(int(time.time()))
+ add_list: List[Coin] = list(spend_bundle.additions())
+ rem_list: List[Coin] = list(spend_bundle.removals())
+ tx_list.append(
+ TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=now,
+ to_puzzle_hash=new_inner_puzhash,
+ amount=uint64(1),
+ fee_amount=uint64(fee),
+ confirmed=False,
+ sent=uint32(0),
+ spend_bundle=spend_bundle,
+ additions=add_list,
+ removals=rem_list,
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.OUTGOING_TX.value),
+ name=spend_bundle.name(),
+ memos=list(compute_memos(spend_bundle).items()),
+ )
+ )
+ return tx_list
+
+ async def revoke_vc(
+ self, parent_id: bytes32, peer: WSChiaConnection, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None
+ ) -> List[TransactionRecord]:
+ vc_coin_states: List[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state(
+ [parent_id], peer=peer
+ )
+ if vc_coin_states is None:
+ raise ValueError(f"Cannot find verified credential coin: {parent_id.hex()}") # pragma: no cover
+ vc_coin_state = vc_coin_states[0]
+ cs: CoinSpend = await fetch_coin_spend_for_coin_state(vc_coin_state, peer)
+ vc: VerifiedCredential = VerifiedCredential.get_next_from_coin_spend(cs)
+
+ # Check if we own the DID
+ did_wallet: DIDWallet
+ for _, wallet in self.wallet_state_manager.wallets.items():
+ if wallet.type() == WalletType.DECENTRALIZED_ID:
+ assert isinstance(wallet, DIDWallet)
+ if bytes32.fromhex(wallet.get_my_DID()) == vc.proof_provider:
+ did_wallet = wallet
+ break
+ else:
+ raise ValueError(f"You don't own the DID {vc.proof_provider.hex()}") # pragma: no cover
+
+ recovery_info: Optional[Tuple[bytes32, bytes32, uint64]] = await did_wallet.get_info_for_recovery()
+ if recovery_info is None:
+ raise RuntimeError("DID could not currently be accessed while trying to revoke VC") # pragma: no cover
+ _, provider_inner_puzhash, _ = recovery_info
+
+ # Generate spend specific nonce
+ coins = await did_wallet.select_coins(uint64(1))
+ assert coins is not None
+ coins.add(vc.coin)
+ if fee > 0:
+ coins.update(await self.standard_wallet.select_coins(fee))
+ sorted_coins: List[Coin] = sorted(coins, key=Coin.name)
+ sorted_coin_list: List[List[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins]
+ nonce: bytes32 = Program.to(sorted_coin_list).get_tree_hash()
+ vc_announcement: Announcement = Announcement(vc.coin.name(), nonce)
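+        # The DID message spend below asserts this announcement, binding the revocation to the DID's authorization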
+
+ # Assemble final bundle
+ expected_did_announcement, vc_spend = vc.activate_backdoor(provider_inner_puzhash, announcement_nonce=nonce)
+ did_spend: SpendBundle = await did_wallet.create_message_spend(
+ puzzle_announcements={expected_did_announcement},
+ coin_announcements_to_assert={vc_announcement},
+ )
+ final_bundle: SpendBundle = SpendBundle.aggregate([SpendBundle([vc_spend], G2Element()), did_spend])
+ tx: TransactionRecord = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=vc.inner_puzzle_hash,
+ amount=uint64(1),
+ fee_amount=uint64(fee),
+ confirmed=False,
+ sent=uint32(0),
+ spend_bundle=final_bundle,
+ additions=list(final_bundle.additions()),
+ removals=list(final_bundle.removals()),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.OUTGOING_TX.value),
+ name=final_bundle.name(),
+ memos=list(compute_memos(final_bundle).items()),
+ )
+ if fee > 0:
+ chia_tx: TransactionRecord = await self.wallet_state_manager.main_wallet.create_tandem_xch_tx(
+ fee, vc_announcement, reuse_puzhash
+ )
+ assert tx.spend_bundle is not None
+ assert chia_tx.spend_bundle is not None
+ tx = dataclasses.replace(tx, spend_bundle=SpendBundle.aggregate([chia_tx.spend_bundle, tx.spend_bundle]))
+ chia_tx = dataclasses.replace(chia_tx, spend_bundle=None)
+ return [tx, chia_tx]
+ else:
+ return [tx] # pragma: no cover
+
+ async def select_coins(
+ self,
+ amount: uint64,
+ exclude: Optional[List[Coin]] = None,
+ min_coin_amount: Optional[uint64] = None,
+ max_coin_amount: Optional[uint64] = None,
+ excluded_coin_amounts: Optional[List[uint64]] = None,
+ ) -> Set[Coin]:
+ raise RuntimeError("VCWallet does not support select_coins()") # pragma: no cover
+
+ async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ """The VC wallet doesn't really have a balance."""
+ return uint128(0) # pragma: no cover
+
+ async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ """The VC wallet doesn't really have a balance."""
+ return uint128(0) # pragma: no cover
+
+ async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ """The VC wallet doesn't really have a balance."""
+ return uint128(0) # pragma: no cover
+
+ async def get_pending_change_balance(self) -> uint64:
+ return uint64(0) # pragma: no cover
+
+ async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ """This is the confirmed balance, which we set to 0 as the VC wallet doesn't have one."""
+ return uint128(0) # pragma: no cover
+
+ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
+ raise RuntimeError("VCWallet does not support puzzle_hash_for_pk") # pragma: no cover
+
+ def require_derivation_paths(self) -> bool:
+ return False
+
+ def get_name(self) -> str:
+ return self.wallet_info.name # pragma: no cover
+
+
+if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol # pragma: no cover
+
+ _dummy: WalletProtocol = VCWallet() # pragma: no cover
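The revocation flow above ties the DID and VC spends together with an announcement nonce derived from the selected coins, and folds an optional fee in by aggregating a separate XCH transaction's spend bundle into the VC transaction while clearing it from the fee record. Below is a minimal standalone sketch of that aggregation pattern; FakeSpendBundle and FakeTx are hypothetical stand-ins, not the real chia classes.

from dataclasses import dataclass, replace
from typing import List, Optional


@dataclass(frozen=True)
class FakeSpendBundle:
    # Hypothetical stand-in for chia's SpendBundle; only the aggregation shape matters here.
    spends: tuple

    @classmethod
    def aggregate(cls, bundles: List["FakeSpendBundle"]) -> "FakeSpendBundle":
        return cls(tuple(s for b in bundles for s in b.spends))


@dataclass(frozen=True)
class FakeTx:
    # Hypothetical stand-in for TransactionRecord.
    amount: int
    fee: int
    spend_bundle: Optional[FakeSpendBundle]


def attach_fee(vc_tx: FakeTx, fee_tx: FakeTx) -> List[FakeTx]:
    # Mirrors the pattern above: merge the fee transaction's bundle into the primary
    # transaction, then strip it from the fee record so it is not pushed twice.
    assert vc_tx.spend_bundle is not None and fee_tx.spend_bundle is not None
    merged = FakeSpendBundle.aggregate([fee_tx.spend_bundle, vc_tx.spend_bundle])
    return [replace(vc_tx, spend_bundle=merged), replace(fee_tx, spend_bundle=None)]


if __name__ == "__main__":
    vc = FakeTx(1, 0, FakeSpendBundle(("vc_spend", "did_spend")))
    fee = FakeTx(0, 10, FakeSpendBundle(("xch_fee_spend",)))
    print(attach_fee(vc, fee))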
diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py
--- a/chia/wallet/wallet.py
+++ b/chia/wallet/wallet.py
@@ -2,7 +2,7 @@
import logging
import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast
from blspy import AugSchemeMPL, G1Element, G2Element
@@ -21,6 +21,7 @@
from chia.util.ints import uint32, uint64, uint128
from chia.wallet.coin_selection import select_coins
from chia.wallet.derivation_record import DerivationRecord
+from chia.wallet.payment import Payment
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
@@ -43,7 +44,7 @@
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
+from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_info import WalletInfo
@@ -55,6 +56,11 @@
class Wallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol] = cast("Wallet", None)
+
wallet_info: WalletInfo
wallet_state_manager: Any
log: logging.Logger
@@ -225,26 +231,21 @@ async def get_new_puzzlehash(self) -> bytes32:
def make_solution(
self,
- primaries: List[AmountWithPuzzlehash],
+ primaries: List[Payment],
min_time=0,
me=None,
coin_announcements: Optional[Set[bytes]] = None,
coin_announcements_to_assert: Optional[Set[bytes32]] = None,
puzzle_announcements: Optional[Set[bytes]] = None,
puzzle_announcements_to_assert: Optional[Set[bytes32]] = None,
+ magic_conditions: Optional[List[Any]] = None,
fee=0,
) -> Program:
assert fee >= 0
condition_list = []
if len(primaries) > 0:
for primary in primaries:
- if "memos" in primary:
- memos: Optional[List[bytes]] = primary["memos"]
- if memos is not None and len(memos) == 0:
- memos = None
- else:
- memos = None
- condition_list.append(make_create_coin_condition(primary["puzzlehash"], primary["amount"], memos))
+ condition_list.append(make_create_coin_condition(primary.puzzle_hash, primary.amount, primary.memos))
if min_time > 0:
condition_list.append(make_assert_absolute_seconds_exceeds_condition(min_time))
if me:
@@ -263,6 +264,8 @@ def make_solution(
if puzzle_announcements_to_assert:
for announcement_hash in puzzle_announcements_to_assert:
condition_list.append(make_assert_puzzle_announcement(announcement_hash))
+ if magic_conditions is not None:
+ condition_list.extend(magic_conditions)
return solution_for_conditions(condition_list)
def add_condition_to_solution(self, condition: Program, solution: Program) -> Program:
@@ -315,7 +318,7 @@ async def _generate_unsigned_transaction(
fee: uint64 = uint64(0),
origin_id: bytes32 = None,
coins: Set[Coin] = None,
- primaries_input: Optional[List[AmountWithPuzzlehash]] = None,
+ primaries_input: Optional[List[Payment]] = None,
ignore_max_send_amount: bool = False,
coin_announcements_to_consume: Set[Announcement] = None,
puzzle_announcements_to_consume: Set[Announcement] = None,
@@ -331,15 +334,13 @@ async def _generate_unsigned_transaction(
Generates a unsigned transaction in form of List(Puzzle, Solutions)
Note: this must be called under a wallet state manager lock
"""
- if primaries_input is None:
- primaries: Optional[List[AmountWithPuzzlehash]] = None
- total_amount = amount + fee
- else:
- primaries = primaries_input.copy()
- primaries_amount = 0
- for prim in primaries:
- primaries_amount += prim["amount"]
- total_amount = amount + fee + primaries_amount
+ primaries = []
+ if (primaries_input is None and amount > 0) or primaries_input is not None:
+ primaries.append(Payment(newpuzzlehash, amount, [] if memos is None else memos))
+ if primaries_input is not None:
+ primaries.extend(primaries_input)
+
+ total_amount = sum(primary.amount for primary in primaries) + fee
if reuse_puzhash is None:
reuse_puzhash_config = self.wallet_state_manager.config.get("reuse_public_key_for_change", None)
if reuse_puzhash_config is None:
@@ -379,19 +380,8 @@ async def _generate_unsigned_transaction(
change = spend_value - total_amount
if negative_change_allowed:
change = max(0, change)
- # only kicks in if fee is missing
- if change < 0 and fee + amount == total_amount:
- fee_coins = await self.select_coins(
- # change already includes fee amount
- uint64(abs(change)),
- excluded_coin_amounts=exclude_coin_amounts,
- exclude=([] if exclude_coins is None else list(exclude_coins)) + list(coins or []),
- )
- coins = coins.union(fee_coins)
- spend_value = sum([coin.amount for coin in coins])
- self.log.info(f"Updated spend_value is {spend_value} and total_amount is {total_amount}")
- change = spend_value - total_amount
- assert change >= 0, f"change is negative: {change}"
+
+ assert change >= 0
if coin_announcements_to_consume is not None:
coin_announcements_bytes: Optional[Set[bytes32]] = {a.name() for a in coin_announcements_to_consume}
@@ -406,38 +396,27 @@ async def _generate_unsigned_transaction(
primary_announcement_hash: Optional[bytes32] = None
# Check for duplicates
- if primaries is not None:
- all_primaries_list = [(p["puzzlehash"], p["amount"]) for p in primaries] + [(newpuzzlehash, amount)]
- if len(set(all_primaries_list)) != len(all_primaries_list):
- raise ValueError("Cannot create two identical coins")
- if memos is None:
- memos = []
- assert memos is not None
+ all_primaries_list = [(p.puzzle_hash, p.amount) for p in primaries]
+ if len(set(all_primaries_list)) != len(all_primaries_list):
+ raise ValueError("Cannot create two identical coins")
for coin in coins:
# Only one coin creates outputs
if origin_id in (None, coin.name()):
origin_id = coin.name()
- if primaries is None:
- if amount > 0:
- primaries = [{"puzzlehash": newpuzzlehash, "amount": uint64(amount), "memos": memos}]
- else:
- primaries = []
- else:
- primaries.append({"puzzlehash": newpuzzlehash, "amount": uint64(amount), "memos": memos})
if change > 0:
if reuse_puzhash:
change_puzzle_hash: bytes32 = coin.puzzle_hash
for primary in primaries:
-                        if change_puzzle_hash == primary["puzzlehash"] and change == primary["amount"]:
+                        if change_puzzle_hash == primary.puzzle_hash and change == primary.amount:
# We cannot create two coins has same id, create a new puzhash for the change:
change_puzzle_hash = await self.get_new_puzzlehash()
break
else:
change_puzzle_hash = await self.get_new_puzzlehash()
- primaries.append({"puzzlehash": change_puzzle_hash, "amount": uint64(change), "memos": []})
+ primaries.append(Payment(change_puzzle_hash, uint64(change)))
message_list: List[bytes32] = [c.name() for c in coins]
for primary in primaries:
- message_list.append(Coin(coin.name(), primary["puzzlehash"], primary["amount"]).name())
+ message_list.append(Coin(coin.name(), primary.puzzle_hash, primary.amount).name())
message: bytes32 = std_hash(b"".join(message_list))
puzzle: Program = await self.puzzle_for_puzzle_hash(coin.puzzle_hash)
solution: Program = self.make_solution(
@@ -503,7 +482,7 @@ async def generate_signed_transaction(
fee: uint64 = uint64(0),
origin_id: bytes32 = None,
coins: Set[Coin] = None,
- primaries: Optional[List[AmountWithPuzzlehash]] = None,
+ primaries: Optional[List[Payment]] = None,
ignore_max_send_amount: bool = False,
coin_announcements_to_consume: Set[Announcement] = None,
puzzle_announcements_to_consume: Set[Announcement] = None,
@@ -523,7 +502,7 @@ async def generate_signed_transaction(
if primaries is None:
non_change_amount = amount
else:
- non_change_amount = uint64(amount + sum(p["amount"] for p in primaries))
+ non_change_amount = uint64(amount + sum(p.amount for p in primaries))
self.log.debug("Generating transaction for: %s %s %s", puzzle_hash, amount, repr(coins))
transaction = await self._generate_unsigned_transaction(
@@ -584,51 +563,37 @@ async def generate_signed_transaction(
memos=list(compute_memos(spend_bundle).items()),
)
+ async def create_tandem_xch_tx(
+ self,
+ fee: uint64,
+ announcement_to_assert: Optional[Announcement] = None,
+ reuse_puzhash: Optional[bool] = None,
+ ) -> TransactionRecord:
+ chia_coins = await self.select_coins(fee)
+ if reuse_puzhash is None:
+ reuse_puzhash_config = self.wallet_state_manager.config.get("reuse_public_key_for_change", None)
+ if reuse_puzhash_config is None:
+ reuse_puzhash = False
+ else:
+ reuse_puzhash = reuse_puzhash_config.get(
+ str(self.wallet_state_manager.wallet_node.logged_in_fingerprint), False
+ )
+ chia_tx = await self.generate_signed_transaction(
+ uint64(0),
+ (await self.get_puzzle_hash(not reuse_puzhash)),
+ fee=fee,
+ coins=chia_coins,
+ coin_announcements_to_consume={announcement_to_assert} if announcement_to_assert is not None else None,
+ reuse_puzhash=reuse_puzhash,
+ )
+ assert chia_tx.spend_bundle is not None
+ return chia_tx
+
async def push_transaction(self, tx: TransactionRecord) -> None:
"""Use this API to send transactions."""
await self.wallet_state_manager.add_pending_transaction(tx)
await self.wallet_state_manager.wallet_node.update_ui()
- # This is to be aggregated together with a CAT offer to ensure that the trade happens
- async def create_spend_bundle_relative_chia(self, chia_amount: int, exclude: List[Coin] = []) -> SpendBundle:
- list_of_solutions = []
- utxos = None
-
- # If we're losing value then get coins with at least that much value
- # If we're gaining value then our amount doesn't matter
- if chia_amount < 0:
- utxos = await self.select_coins(uint64(abs(chia_amount)), exclude)
- else:
- utxos = await self.select_coins(uint64(0), exclude)
-
- assert len(utxos) > 0
-
- # Calculate output amount given sum of utxos
- spend_value = sum([coin.amount for coin in utxos])
- chia_amount = spend_value + chia_amount
-
- # Create coin solutions for each utxo
- output_created = None
- for coin in utxos:
- puzzle = await self.puzzle_for_puzzle_hash(coin.puzzle_hash)
- if output_created is None:
- newpuzhash = await self.get_new_puzzlehash()
- primaries: List[AmountWithPuzzlehash] = [
- {"puzzlehash": newpuzhash, "amount": uint64(chia_amount), "memos": []}
- ]
- solution = self.make_solution(primaries=primaries)
- output_created = coin
- list_of_solutions.append(CoinSpend(coin, puzzle, solution))
-
- await self.hack_populate_secret_keys_for_coin_spends(list_of_solutions)
- spend_bundle = await sign_coin_spends(
- list_of_solutions,
- self.secret_key_store.secret_key_for_public_key,
- self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA,
- self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
- )
- return spend_bundle
-
async def get_coins_to_offer(
self,
asset_id: Optional[bytes32],
@@ -651,9 +616,3 @@ async def coin_added(
def get_name(self) -> str:
return "Standard Wallet"
-
-
-if TYPE_CHECKING:
- from chia.wallet.wallet_protocol import WalletProtocol
-
- _dummy: WalletProtocol = Wallet()
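The wallet.py changes above replace the AmountWithPuzzlehash dicts with Payment objects for primaries, so make_solution can iterate outputs uniformly and _generate_unsigned_transaction can append the change output as just another Payment. Here is a small self-contained sketch of that pattern; this Payment class and the condition tuples are simplified stand-ins, not the actual chia.wallet.payment API.

from dataclasses import dataclass, field
from typing import List, Tuple

CREATE_COIN = 51  # standard Chialisp condition opcode


@dataclass(frozen=True)
class Payment:
    # Simplified stand-in for chia.wallet.payment.Payment
    puzzle_hash: bytes
    amount: int
    memos: List[bytes] = field(default_factory=list)


def build_create_coin_conditions(
    primaries: List[Payment], change_puzzle_hash: bytes, change: int
) -> List[Tuple]:
    outputs = list(primaries)
    if change > 0:
        outputs.append(Payment(change_puzzle_hash, change))
    # Reject duplicate outputs, mirroring the "Cannot create two identical coins" check.
    keys = [(p.puzzle_hash, p.amount) for p in outputs]
    if len(set(keys)) != len(keys):
        raise ValueError("Cannot create two identical coins")
    return [(CREATE_COIN, p.puzzle_hash, p.amount, p.memos) for p in outputs]


if __name__ == "__main__":
    prims = [Payment(b"\x01" * 32, 1000), Payment(b"\x02" * 32, 250, [b"memo"])]
    print(build_create_coin_conditions(prims, b"\x03" * 32, 4750))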
diff --git a/chia/wallet/wallet_action.py b/chia/wallet/wallet_action.py
deleted file mode 100644
--- a/chia/wallet/wallet_action.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import dataclass
-from typing import Optional
-
-from chia.util.ints import uint32
-from chia.wallet.util.wallet_types import WalletType
-
-
-@dataclass(frozen=True)
-class WalletAction:
- """
- This object represents the wallet action as it is stored in the database.
-
- Purpose:
- Some wallets require wallet node to perform a certain action when event happens.
- For Example, CAT wallet needs to fetch solutions once it receives a coin.
- In order to be safe from losing connection, closing the app, etc, those actions need to be persisted.
-
- id: auto-incremented for every added action
- name: Specified by the wallet
- Wallet_id: ID of the wallet that created this action
- type: Type of the wallet that created this action
- wallet_callback: Name of the callback function in the wallet that created this action, if specified it will
- get called when action has been performed.
- done: Indicates if the action has been performed
- data: JSON encoded string containing any data wallet or a wallet_node needs for this specific action.
- """
-
- id: uint32
- name: str
- wallet_id: int
- type: WalletType
- wallet_callback: Optional[str]
- done: bool
- data: str
diff --git a/chia/wallet/wallet_blockchain.py b/chia/wallet/wallet_blockchain.py
--- a/chia/wallet/wallet_blockchain.py
+++ b/chia/wallet/wallet_blockchain.py
@@ -89,7 +89,7 @@ async def new_valid_weight_proof(self, weight_proof: WeightProof, records: List[
await self.set_peak_block(weight_proof.recent_chain_data[-1], latest_timestamp)
await self.clean_block_records()
- async def receive_block(self, block: HeaderBlock) -> Tuple[AddBlockResult, Optional[Err]]:
+ async def add_block(self, block: HeaderBlock) -> Tuple[AddBlockResult, Optional[Err]]:
if self.contains_block(block.header_hash):
return AddBlockResult.ALREADY_HAVE_BLOCK, None
if not self.contains_block(block.prev_header_hash) and block.height > 0:
diff --git a/chia/wallet/wallet_coin_record.py b/chia/wallet/wallet_coin_record.py
--- a/chia/wallet/wallet_coin_record.py
+++ b/chia/wallet/wallet_coin_record.py
@@ -1,12 +1,15 @@
from __future__ import annotations
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
-from chia.util.ints import uint32, uint64
-from chia.wallet.util.wallet_types import WalletType
+from chia.util.ints import uint8, uint32, uint64
+from chia.util.misc import VersionedBlob
+from chia.util.streamable import Streamable
+from chia.wallet.util.wallet_types import CoinType, StreamableWalletIdentifier, WalletType
@dataclass(frozen=True)
@@ -23,9 +26,41 @@ class WalletCoinRecord:
coinbase: bool
wallet_type: WalletType
wallet_id: int
+ # Cannot include new attributes in the hash since they will change the coin order in a set.
+    # The launcher coin ID will change and will break all hardcoded offer tests in CAT/NFT/DL, etc.
+    # TODO: Change hardcoded offers in unit tests
+ coin_type: CoinType = field(default=CoinType.NORMAL, hash=False)
+ metadata: Optional[VersionedBlob] = field(default=None, hash=False)
+
+ def wallet_identifier(self) -> StreamableWalletIdentifier:
+ return StreamableWalletIdentifier(uint32(self.wallet_id), uint8(self.wallet_type))
+
+ def parsed_metadata(self) -> Streamable:
+ if self.metadata is None:
+ raise ValueError("Can't parse None metadata")
+ if self.coin_type == CoinType.CLAWBACK:
+            # TODO: Parse proper clawback metadata here when it's introduced
+ return self.metadata
+ else:
+ raise ValueError(f"Unknown metadata {self.metadata} for coin_type {self.coin_type}")
def name(self) -> bytes32:
return self.coin.name()
def to_coin_record(self, timestamp: uint64) -> CoinRecord:
return CoinRecord(self.coin, self.confirmed_block_height, self.spent_block_height, self.coinbase, timestamp)
+
+ def to_json_dict_parsed_metadata(self) -> Dict[str, Any]:
+ # TODO: Merge wallet_type and wallet_id into `wallet_identifier`, make `spent` an attribute based
+        # on `spent_height`, make `WalletCoinRecord` streamable, and use Streamable.to_json_dict as base here if we have
+ # streamable enums.
+ return {
+ **self.coin.to_json_dict(),
+ "id": "0x" + self.name().hex(),
+ "type": int(self.coin_type),
+ "wallet_identifier": self.wallet_identifier().to_json_dict(),
+ "metadata": None if self.metadata is None else self.parsed_metadata().to_json_dict(),
+ "confirmed_height": self.confirmed_block_height,
+ "spent_height": self.spent_block_height,
+ "coinbase": self.coinbase,
+ }
diff --git a/chia/wallet/wallet_coin_store.py b/chia/wallet/wallet_coin_store.py
--- a/chia/wallet/wallet_coin_store.py
+++ b/chia/wallet/wallet_coin_store.py
@@ -1,28 +1,69 @@
from __future__ import annotations
import sqlite3
+from dataclasses import dataclass
+from enum import IntEnum
from typing import Dict, List, Optional, Set
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.db_wrapper import DBWrapper2, execute_fetchone
-from chia.util.ints import uint32, uint64
-from chia.wallet.util.wallet_types import WalletType
+from chia.util.hash import std_hash
+from chia.util.ints import uint8, uint32, uint64
+from chia.util.lru_cache import LRUCache
+from chia.util.misc import UInt32Range, UInt64Range, VersionedBlob
+from chia.util.streamable import Streamable, streamable
+from chia.wallet.util.query_filter import AmountFilter, FilterMode, HashFilter
+from chia.wallet.util.wallet_types import CoinType, WalletType
from chia.wallet.wallet_coin_record import WalletCoinRecord
+class CoinRecordOrder(IntEnum):
+ confirmed_height = 1
+ spent_height = 2
+
+
+@streamable
+@dataclass(frozen=True)
+class GetCoinRecords(Streamable):
+ offset: uint32 = uint32(0)
+ limit: uint32 = uint32(uint32.MAXIMUM_EXCLUSIVE - 1)
+ wallet_id: Optional[uint32] = None
+ wallet_type: Optional[uint8] = None # WalletType
+ coin_type: Optional[uint8] = None # CoinType
+ coin_id_filter: Optional[HashFilter] = None
+ puzzle_hash_filter: Optional[HashFilter] = None
+ parent_coin_id_filter: Optional[HashFilter] = None
+ amount_filter: Optional[AmountFilter] = None
+ amount_range: Optional[UInt64Range] = None
+ confirmed_range: Optional[UInt32Range] = None
+ spent_range: Optional[UInt32Range] = None
+ order: uint8 = uint8(CoinRecordOrder.confirmed_height)
+ reverse: bool = False
+ include_total_count: bool = False # Include the total number of entries for the query without applying offset/limit
+
+
+@dataclass(frozen=True)
+class GetCoinRecordsResult:
+ records: List[WalletCoinRecord]
+ coin_id_to_record: Dict[bytes32, WalletCoinRecord]
+ total_count: Optional[uint32]
+
+
class WalletCoinStore:
"""
This object handles CoinRecords in DB used by wallet.
"""
db_wrapper: DBWrapper2
+ total_count_cache: LRUCache[bytes32, uint32]
@classmethod
async def create(cls, wrapper: DBWrapper2):
self = cls()
self.db_wrapper = wrapper
+ self.total_count_cache = LRUCache(100)
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
@@ -54,13 +95,21 @@ async def create(cls, wrapper: DBWrapper2):
await conn.execute("CREATE INDEX IF NOT EXISTS coin_amount on coin_record(amount)")
+ try:
+ await conn.execute("ALTER TABLE coin_record ADD COLUMN coin_type int DEFAULT 0")
+ await conn.execute("ALTER TABLE coin_record ADD COLUMN metadata blob")
+ await conn.execute("CREATE INDEX IF NOT EXISTS coin_record_coin_type on coin_record(coin_type)")
+ except sqlite3.OperationalError:
+ pass
return self
- async def count_small_unspent(self, cutoff: int) -> int:
+ async def count_small_unspent(self, cutoff: int, coin_type: CoinType = CoinType.NORMAL) -> int:
amount_bytes = bytes(uint64(cutoff))
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
- conn, "SELECT COUNT(*) FROM coin_record WHERE amount < ? AND spent=0", (amount_bytes,)
+ conn,
+ "SELECT COUNT(*) FROM coin_record WHERE coin_type=? AND amount < ? AND spent=0",
+ (coin_type, amount_bytes),
)
return int(0 if row is None else row[0])
@@ -71,7 +120,9 @@ async def add_coin_record(self, record: WalletCoinRecord, name: Optional[bytes32
assert record.spent == (record.spent_block_height != 0)
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute_insert(
- "INSERT OR REPLACE INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ "INSERT OR REPLACE INTO coin_record ("
+ "coin_name, confirmed_height, spent_height, spent, coinbase, puzzle_hash, coin_parent, amount, "
+ "wallet_type, wallet_id, coin_type, metadata) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
name.hex(),
record.confirmed_block_height,
@@ -83,13 +134,17 @@ async def add_coin_record(self, record: WalletCoinRecord, name: Optional[bytes32
bytes(uint64(record.coin.amount)),
record.wallet_type,
record.wallet_id,
+ record.coin_type,
+ None if record.metadata is None else bytes(record.metadata),
),
)
+ self.total_count_cache.cache.clear()
# Sometimes we realize that a coin is actually not interesting to us so we need to delete it
async def delete_coin_record(self, coin_name: bytes32) -> None:
async with self.db_wrapper.writer_maybe_transaction() as conn:
await (await conn.execute("DELETE FROM coin_record WHERE coin_name=?", (coin_name.hex(),))).close()
+ self.total_count_cache.cache.clear()
# Update coin_record to be spent in DB
async def set_spent(self, coin_name: bytes32, height: uint32) -> None:
@@ -102,11 +157,20 @@ async def set_spent(self, coin_name: bytes32, height: uint32) -> None:
coin_name.hex(),
),
)
+ self.total_count_cache.cache.clear()
def coin_record_from_row(self, row: sqlite3.Row) -> WalletCoinRecord:
coin = Coin(bytes32.fromhex(row[6]), bytes32.fromhex(row[5]), uint64.from_bytes(row[7]))
return WalletCoinRecord(
- coin, uint32(row[1]), uint32(row[2]), bool(row[3]), bool(row[4]), WalletType(row[8]), row[9]
+ coin,
+ uint32(row[1]),
+ uint32(row[2]),
+ bool(row[3]),
+ bool(row[4]),
+ WalletType(row[8]),
+ row[9],
+ CoinType(row[10]),
+ None if row[11] is None else VersionedBlob.from_bytes(row[11]),
)
async def get_coin_record(self, coin_name: bytes32) -> Optional[WalletCoinRecord]:
@@ -120,29 +184,101 @@ async def get_coin_record(self, coin_name: bytes32) -> Optional[WalletCoinRecord
async def get_coin_records(
self,
- coin_names: List[bytes32],
- include_spent_coins: bool = True,
- start_height: uint32 = uint32(0),
- end_height: uint32 = uint32((2**32) - 1),
- ) -> Dict[bytes32, WalletCoinRecord]:
- """Returns CoinRecord with specified coin id."""
- async with self.db_wrapper.reader_no_transaction() as conn:
- rows = list(
- await conn.execute_fetchall(
- f"SELECT * from coin_record WHERE coin_name in ({','.join('?'*len(coin_names))}) "
- f"AND confirmed_height>=? AND confirmed_height<? "
- f"{'' if include_spent_coins else 'AND spent=0'}",
- tuple([c.hex() for c in coin_names]) + (start_height, end_height),
- )
+ *,
+ offset: uint32 = uint32(0),
+ limit: uint32 = uint32(uint32.MAXIMUM_EXCLUSIVE - 1),
+ wallet_id: Optional[uint32] = None,
+ wallet_type: Optional[WalletType] = None,
+ coin_type: Optional[CoinType] = None,
+ coin_id_filter: Optional[HashFilter] = None,
+ puzzle_hash_filter: Optional[HashFilter] = None,
+ parent_coin_id_filter: Optional[HashFilter] = None,
+ amount_filter: Optional[AmountFilter] = None,
+ amount_range: Optional[UInt64Range] = None,
+ confirmed_range: Optional[UInt32Range] = None,
+ spent_range: Optional[UInt32Range] = None,
+ order: CoinRecordOrder = CoinRecordOrder.confirmed_height,
+ reverse: bool = False,
+ include_total_count: bool = False,
+ ) -> GetCoinRecordsResult:
+ conditions = []
+ if wallet_id is not None:
+ conditions.append(f"wallet_id={wallet_id}")
+ if wallet_type is not None:
+ conditions.append(f"wallet_type={wallet_type.value}")
+ if coin_type is not None:
+ conditions.append(f"coin_type={coin_type.value}")
+ for field, hash_filter in {
+ "coin_name": coin_id_filter,
+ "coin_parent": parent_coin_id_filter,
+ "puzzle_hash": puzzle_hash_filter,
+ }.items():
+ if hash_filter is None:
+ continue
+ entries = ",".join(f"{value.hex()!r}" for value in hash_filter.values)
+ conditions.append(
+ f"{field} {'not' if FilterMode(hash_filter.mode) == FilterMode.exclude else ''} in ({entries})"
+ )
+ if confirmed_range is not None and confirmed_range != UInt32Range():
+ conditions.append(f"confirmed_height BETWEEN {confirmed_range.start} AND {confirmed_range.stop}")
+ if spent_range is not None and spent_range != UInt32Range():
+ conditions.append(f"spent_height BETWEEN {spent_range.start} AND {spent_range.stop}")
+ if amount_filter is not None:
+ entries = ",".join(f"X'{bytes(value).hex()}'" for value in amount_filter.values)
+ conditions.append(
+ f"amount {'not' if FilterMode(amount_filter.mode) == FilterMode.exclude else ''} in ({entries})"
)
+ if amount_range is not None and amount_range != UInt64Range():
+ conditions.append(
+ f"amount BETWEEN X'{bytes(amount_range.start).hex()}' AND X'{bytes(amount_range.stop).hex()}'"
+ )
+
+ where_sql = "WHERE " + " AND ".join(conditions) if len(conditions) > 0 else ""
+ order_sql = f"ORDER BY {order.name} {'DESC' if reverse else 'ASC'}, rowid"
+ limit_sql = f"LIMIT {offset}, {limit}" if offset > 0 or limit < uint32.MAXIMUM_EXCLUSIVE - 1 else ""
+ query_sql = f"{where_sql} {order_sql} {limit_sql}"
- ret: Dict[bytes32, WalletCoinRecord] = {}
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = await conn.execute_fetchall(f"SELECT * FROM coin_record {query_sql}")
+
+ total_count = None
+ if include_total_count:
+ cache_hash = std_hash(bytes(where_sql, encoding="utf8")) # Only use the conditions here
+ total_count = self.total_count_cache.get(cache_hash)
+ if total_count is None:
+ row = await execute_fetchone(conn, f"SELECT COUNT(coin_name) FROM coin_record {where_sql}")
+ assert row is not None and len(row) == 1, "COUNT should always return one value"
+ total_count = uint32(row[0])
+ self.total_count_cache.put(cache_hash, total_count)
+
+ records: List[WalletCoinRecord] = []
+ coin_id_to_record: Dict[bytes32, WalletCoinRecord] = {}
for row in rows:
- record = self.coin_record_from_row(row)
- coin_name = bytes32.fromhex(row[0])
- ret[coin_name] = record
+ records.append(self.coin_record_from_row(row))
+ coin_id_to_record[bytes32.fromhex(row[0])] = records[-1]
- return ret
+ return GetCoinRecordsResult(
+ records,
+ coin_id_to_record,
+ total_count,
+ )
+
+ async def get_coin_records_between(
+ self, wallet_id: int, start: int, end: int, reverse: bool = False, coin_type: CoinType = CoinType.NORMAL
+ ) -> List[WalletCoinRecord]:
+ """Return a list of coins between start and end index. List is in reverse chronological order.
+ start = 0 is most recent transaction
+ """
+ limit = end - start
+ query_str = "ORDER BY confirmed_height " + ("DESC" if reverse else "ASC")
+
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = await conn.execute_fetchall(
+ f"SELECT * FROM coin_record WHERE coin_type=? AND"
+ f" wallet_id=? {query_str}, rowid LIMIT {start}, {limit}",
+ (coin_type, wallet_id),
+ )
+ return [self.coin_record_from_row(row) for row in rows]
async def get_first_coin_height(self) -> Optional[uint32]:
"""Returns height of first confirmed coin"""
@@ -154,18 +290,23 @@ async def get_first_coin_height(self) -> Optional[uint32]:
return None
- async def get_unspent_coins_for_wallet(self, wallet_id: int) -> Set[WalletCoinRecord]:
+ async def get_unspent_coins_for_wallet(
+ self, wallet_id: int, coin_type: CoinType = CoinType.NORMAL
+ ) -> Set[WalletCoinRecord]:
"""Returns set of CoinRecords that have not been spent yet for a wallet."""
async with self.db_wrapper.reader_no_transaction() as conn:
rows = await conn.execute_fetchall(
- "SELECT * FROM coin_record WHERE wallet_id=? AND spent_height=0", (wallet_id,)
+ "SELECT * FROM coin_record WHERE coin_type=? AND wallet_id=? AND spent_height=0",
+ (coin_type, wallet_id),
)
return set(self.coin_record_from_row(row) for row in rows)
- async def get_all_unspent_coins(self) -> Set[WalletCoinRecord]:
+ async def get_all_unspent_coins(self, coin_type: CoinType = CoinType.NORMAL) -> Set[WalletCoinRecord]:
"""Returns set of CoinRecords that have not been spent yet for a wallet."""
async with self.db_wrapper.reader_no_transaction() as conn:
- rows = await conn.execute_fetchall("SELECT * FROM coin_record WHERE spent_height=0")
+ rows = await conn.execute_fetchall(
+ "SELECT * FROM coin_record WHERE coin_type=? AND spent_height=0", (coin_type,)
+ )
return set(self.coin_record_from_row(row) for row in rows)
# Checks DB and DiffStores for CoinRecords with puzzle_hash and returns them
@@ -200,3 +341,10 @@ async def rollback_to_block(self, height: int) -> None:
(height,),
)
).close()
+ self.total_count_cache.cache.clear()
+
+ async def delete_wallet(self, wallet_id: uint32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute("DELETE FROM coin_record WHERE wallet_id=?", (wallet_id,))
+ await cursor.close()
+ self.total_count_cache.cache.clear()
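get_coin_records now assembles its WHERE clause from whichever optional filters are supplied, orders by the requested column, applies offset/limit, and optionally computes a cached total count keyed on the conditions. Below is a rough standalone sketch of that query-building shape; the parameter names and FilterMode values are illustrative stand-ins, not the real chia.wallet.util.query_filter module, and real values should be bound rather than interpolated where they can contain untrusted strings.

from enum import IntEnum
from typing import List, Optional


class FilterMode(IntEnum):
    include = 1
    exclude = 2


def build_coin_record_query(
    wallet_id: Optional[int] = None,
    coin_type: Optional[int] = None,
    coin_ids: Optional[List[str]] = None,
    coin_id_mode: FilterMode = FilterMode.include,
    order: str = "confirmed_height",
    reverse: bool = False,
    offset: int = 0,
    limit: int = 2**32 - 1,
) -> str:
    conditions = []
    if wallet_id is not None:
        conditions.append(f"wallet_id={wallet_id}")
    if coin_type is not None:
        conditions.append(f"coin_type={coin_type}")
    if coin_ids:
        entries = ",".join(f"'{c}'" for c in coin_ids)
        negate = "not " if coin_id_mode == FilterMode.exclude else ""
        conditions.append(f"coin_name {negate}in ({entries})")
    where_sql = ("WHERE " + " AND ".join(conditions)) if conditions else ""
    order_sql = f"ORDER BY {order} {'DESC' if reverse else 'ASC'}, rowid"
    limit_sql = f"LIMIT {offset}, {limit}" if offset > 0 or limit < 2**32 - 1 else ""
    return f"SELECT * FROM coin_record {where_sql} {order_sql} {limit_sql}".strip()


if __name__ == "__main__":
    print(build_coin_record_query(wallet_id=1, coin_ids=["ab" * 32], reverse=True, limit=50))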
diff --git a/chia/wallet/wallet_nft_store.py b/chia/wallet/wallet_nft_store.py
--- a/chia/wallet/wallet_nft_store.py
+++ b/chia/wallet/wallet_nft_store.py
@@ -272,3 +272,8 @@ async def rollback_to_block(self, height: int) -> bool:
if result.rowcount > 0:
return True
return False
+
+ async def delete_wallet(self, wallet_id: uint32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute("DELETE FROM users_nfts WHERE wallet_id=?", (wallet_id,))
+ await cursor.close()
diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py
--- a/chia/wallet/wallet_node.py
+++ b/chia/wallet/wallet_node.py
@@ -15,15 +15,24 @@
from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
from packaging.version import Version
-from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain import AddBlockResult
from chia.consensus.constants import ConsensusConstants
from chia.daemon.keychain_proxy import KeychainProxy, connect_to_keychain_and_validate, wrap_local_keychain
from chia.full_node.full_node_api import FullNodeAPI
-from chia.protocols import wallet_protocol
from chia.protocols.full_node_protocol import RequestProofOfWeight, RespondProofOfWeight
from chia.protocols.protocol_message_types import ProtocolMessageTypes
-from chia.protocols.wallet_protocol import CoinState, RespondBlockHeader, RespondToCoinUpdates
+from chia.protocols.wallet_protocol import (
+ CoinState,
+ CoinStateUpdate,
+ NewPeakWallet,
+ RegisterForCoinUpdates,
+ RequestBlockHeader,
+ RequestChildren,
+ RespondBlockHeader,
+ RespondChildren,
+ RespondToCoinUpdates,
+ SendTransaction,
+)
from chia.rpc.rpc_server import StateChangedProtocol, default_get_connections
from chia.server.node_discovery import WalletPeers
from chia.server.outbound_message import Message, NodeType, make_msg
@@ -32,10 +41,9 @@
from chia.server.ws_connection import WSChiaConnection
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
-from chia.types.coin_spend import CoinSpend
from chia.types.header_block import HeaderBlock
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
+from chia.types.spend_bundle import SpendBundle
from chia.types.weight_proof import WeightProof
from chia.util.chunks import chunks
from chia.util.config import (
@@ -46,10 +54,11 @@
)
from chia.util.db_wrapper import manage_connection
from chia.util.errors import KeychainIsEmpty, KeychainIsLocked, KeychainKeyNotFound, KeychainProxyConnectionFailure
-from chia.util.ints import uint32, uint64
+from chia.util.ints import uint32, uint64, uint128
from chia.util.keychain import Keychain
from chia.util.path import path_from_root
from chia.util.profiler import mem_profile_task, profile_task
+from chia.util.streamable import Streamable, streamable
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.new_peak_queue import NewPeakItem, NewPeakQueue, NewPeakQueueTypes
from chia.wallet.util.peer_request_cache import PeerRequestCache, can_use_peer_request_cache
@@ -57,10 +66,10 @@
PeerRequestException,
fetch_header_blocks_in_range,
fetch_last_tx_from_peer,
- last_change_height_cs,
request_and_validate_additions,
request_and_validate_removals,
request_header_blocks,
+ sort_coin_states,
subscribe_to_coin_updates,
subscribe_to_phs,
)
@@ -85,9 +94,21 @@ def get_wallet_db_path(root_path: Path, config: Dict[str, Any], key_fingerprint:
return path
+@streamable
+@dataclasses.dataclass(frozen=True)
+class Balance(Streamable):
+ confirmed_wallet_balance: uint128 = uint128(0)
+ unconfirmed_wallet_balance: uint128 = uint128(0)
+ spendable_balance: uint128 = uint128(0)
+ pending_change: uint64 = uint64(0)
+ max_send_amount: uint128 = uint128(0)
+ unspent_coin_count: uint32 = uint32(0)
+ pending_coin_removal_count: uint32 = uint32(0)
+
+
@dataclasses.dataclass
class WalletNode:
- config: Dict
+ config: Dict[str, Any]
root_path: Path
constants: ConsensusConstants
local_keychain: Optional[Keychain] = None
@@ -99,14 +120,14 @@ class WalletNode:
_wallet_state_manager: Optional[WalletStateManager] = None
_weight_proof_handler: Optional[WalletWeightProofHandler] = None
_server: Optional[ChiaServer] = None
- sync_task: Optional[asyncio.Task] = None
+ sync_task: Optional[asyncio.Task[None]] = None
logged_in_fingerprint: Optional[int] = None
logged_in: bool = False
_keychain_proxy: Optional[KeychainProxy] = None
+ _balance_cache: Dict[int, Balance] = dataclasses.field(default_factory=dict)
# Peers that we have long synced to
synced_peers: Set[bytes32] = dataclasses.field(default_factory=set)
wallet_peers: Optional[WalletPeers] = None
- valid_wp_cache: Dict[bytes32, Any] = dataclasses.field(default_factory=dict)
peer_caches: Dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
# in Untrusted mode wallet might get the state update before receiving the block
race_cache: Dict[bytes32, Set[CoinState]] = dataclasses.field(default_factory=dict)
@@ -117,13 +138,14 @@ class WalletNode:
LONG_SYNC_THRESHOLD: int = 300
last_wallet_tx_resend_time: int = 0
# Duration in seconds
+ coin_state_retry_seconds: int = 10
wallet_tx_resend_timeout_secs: int = 1800
_new_peak_queue: Optional[NewPeakQueue] = None
_shut_down: bool = False
- _process_new_subscriptions_task: Optional[asyncio.Task] = None
- _retry_failed_states_task: Optional[asyncio.Task] = None
- _secondary_peer_sync_task: Optional[asyncio.Task] = None
+ _process_new_subscriptions_task: Optional[asyncio.Task[None]] = None
+ _retry_failed_states_task: Optional[asyncio.Task[None]] = None
+ _secondary_peer_sync_task: Optional[asyncio.Task[None]] = None
@property
def keychain_proxy(self) -> KeychainProxy:
@@ -174,12 +196,12 @@ async def ensure_keychain_proxy(self) -> KeychainProxy:
raise KeychainProxyConnectionFailure()
return self._keychain_proxy
- def get_cache_for_peer(self, peer) -> PeerRequestCache:
+ def get_cache_for_peer(self, peer: WSChiaConnection) -> PeerRequestCache:
if peer.peer_node_id not in self.peer_caches:
self.peer_caches[peer.peer_node_id] = PeerRequestCache()
return self.peer_caches[peer.peer_node_id]
- def rollback_request_caches(self, reorg_height: int):
+ def rollback_request_caches(self, reorg_height: int) -> None:
# Everything after reorg_height should be removed from the cache
for cache in self.peer_caches.values():
cache.clear_after_height(reorg_height)
@@ -258,6 +280,8 @@ async def reset_sync_db(self, db_path: Union[Path, str], fingerprint: int) -> bo
"coin_of_interest_to_trade_record",
"notifications",
"retry_store",
+ "vc_records",
+ "vc_proofs",
]
async with manage_connection(db_path) as conn:
@@ -355,14 +379,10 @@ async def _start_with_fingerprint(
self,
)
- if self.wallet_peers is None:
- self.initialize_wallet_peers()
-
if self.state_changed_callback is not None:
self.wallet_state_manager.set_callback(self.state_changed_callback)
self.last_wallet_tx_resend_time = int(time.time())
- self.last_state_retry_time = int(time.time())
self.wallet_tx_resend_timeout_secs = self.config.get("tx_resend_timeout_secs", 60 * 60)
self.wallet_state_manager.set_pending_callback(self._pending_tx_handler)
self._shut_down = False
@@ -373,13 +393,22 @@ async def _start_with_fingerprint(
self.log_in(private_key)
self.wallet_state_manager.state_changed("sync_changed")
+ # Populate the balance caches for all wallets
+ async with self.wallet_state_manager.lock:
+ for wallet_id in self.wallet_state_manager.wallets:
+ await self._update_balance_cache(wallet_id)
+
async with self.wallet_state_manager.puzzle_store.lock:
index = await self.wallet_state_manager.puzzle_store.get_last_derivation_path()
if index is None or index < self.wallet_state_manager.initial_num_public_keys - 1:
await self.wallet_state_manager.create_more_puzzle_hashes(from_zero=True)
+
+ if self.wallet_peers is None:
+ self.initialize_wallet_peers()
+
return True
- def _close(self):
+ def _close(self) -> None:
self.log.info("self._close")
self.log_out()
self._shut_down = True
@@ -392,7 +421,7 @@ def _close(self):
if self._secondary_peer_sync_task is not None:
self._secondary_peer_sync_task.cancel()
- async def _await_closed(self, shutting_down: bool = True):
+ async def _await_closed(self, shutting_down: bool = True) -> None:
self.log.info("self._await_closed")
if self._server is not None:
await self.server.close_all_connections()
@@ -407,6 +436,7 @@ async def _await_closed(self, shutting_down: bool = True):
await proxy.close()
await asyncio.sleep(0.5) # https://docs.aiohttp.org/en/stable/client_advanced.html#graceful-shutdown
self.wallet_peers = None
+ self._balance_cache = {}
def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None:
self.state_changed_callback = callback
@@ -415,12 +445,12 @@ def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None:
self.wallet_state_manager.set_callback(self.state_changed_callback)
self.wallet_state_manager.set_pending_callback(self._pending_tx_handler)
- def _pending_tx_handler(self):
+ def _pending_tx_handler(self) -> None:
if self._wallet_state_manager is None:
return None
asyncio.create_task(self._resend_queue())
- async def _resend_queue(self):
+ async def _resend_queue(self) -> None:
if self._shut_down or self._server is None or self._wallet_state_manager is None:
return None
@@ -451,10 +481,7 @@ async def _messages_to_resend(self) -> List[Tuple[Message, Set[bytes32]]]:
for record in records:
if record.spend_bundle is None:
continue
- msg = make_msg(
- ProtocolMessageTypes.send_transaction,
- wallet_protocol.SendTransaction(record.spend_bundle),
- )
+ msg = make_msg(ProtocolMessageTypes.send_transaction, SendTransaction(record.spend_bundle))
already_sent = set()
for peer, status, _ in record.sent_to:
if status == MempoolInclusionStatus.SUCCESS.value:
@@ -463,46 +490,38 @@ async def _messages_to_resend(self) -> List[Tuple[Message, Set[bytes32]]]:
return messages
- async def _retry_failed_states(self):
+ async def _retry_failed_states(self) -> None:
while not self._shut_down:
try:
- await asyncio.sleep(5)
- current_time = time.time()
- if self.last_state_retry_time < current_time - 10:
- self.last_state_retry_time = current_time
- if self.wallet_state_manager is None:
- continue
- states_to_retry = await self.wallet_state_manager.retry_store.get_all_states_to_retry()
- for state, peer_id, fork_height in states_to_retry:
- matching_peer = tuple(
- p for p in self.server.get_connections(NodeType.FULL_NODE) if p.peer_node_id == peer_id
- )
- if len(matching_peer) == 0:
+ await asyncio.sleep(self.coin_state_retry_seconds)
+ if self.wallet_state_manager is None:
+ continue
+ states_to_retry = await self.wallet_state_manager.retry_store.get_all_states_to_retry()
+ for state, peer_id, fork_height in states_to_retry:
+ matching_peer = tuple(
+ p for p in self.server.get_connections(NodeType.FULL_NODE) if p.peer_node_id == peer_id
+ )
+ if len(matching_peer) == 0:
+ try:
peer = self.get_full_node_peer()
- if peer is None:
- self.log.info(f"disconnected from all peers, cannot retry state: {state}")
- continue
- else:
- self.log.info(
- f"disconnected from peer {peer_id}, state will retry with {peer.peer_node_id}"
- )
- else:
- peer = matching_peer[0]
- async with self.wallet_state_manager.db_wrapper.writer():
- self.log.info(f"retrying coin_state: {state}")
- try:
- await self.wallet_state_manager.add_coin_states(
- [state], peer, None if fork_height == 0 else fork_height
- )
- except Exception as e:
- self.log.exception(f"Exception while adding states.. : {e}")
- else:
- await self.wallet_state_manager.blockchain.clean_block_records()
+ self.log.info(
+ f"disconnected from peer {peer_id}, state will retry with {peer.peer_node_id}"
+ )
+ except ValueError:
+ self.log.info(f"disconnected from all peers, cannot retry state: {state}")
+ continue
+ else:
+ peer = matching_peer[0]
+ async with self.wallet_state_manager.db_wrapper.writer():
+ self.log.info(f"retrying coin_state: {state}")
+ await self.wallet_state_manager.add_coin_states(
+ [state], peer, None if fork_height == 0 else fork_height
+ )
except asyncio.CancelledError:
self.log.info("Retry task cancelled, exiting.")
raise
- async def _process_new_subscriptions(self):
+ async def _process_new_subscriptions(self) -> None:
while not self._shut_down:
# Here we process four types of messages in the queue, where the first one has higher priority (lower
# number in the queue), and priority decreases for each type.
@@ -513,7 +532,7 @@ async def _process_new_subscriptions(self):
item = await self.new_peak_queue.get()
assert item is not None
if item.item_type == NewPeakQueueTypes.COIN_ID_SUBSCRIPTION:
- self.log.debug("Pulled from queue: %s %s", item.item_type, item.data)
+ self.log.debug("Pulled from queue: %s %s", item.item_type.name, item.data)
# Subscriptions are the highest priority, because we don't want to process any more peaks or
# state updates until we are sure that we subscribed to everything that we need to. Otherwise,
# we might not be able to process some state.
@@ -522,33 +541,33 @@ async def _process_new_subscriptions(self):
coin_states: List[CoinState] = await subscribe_to_coin_updates(coin_ids, peer, uint32(0))
if len(coin_states) > 0:
async with self.wallet_state_manager.lock:
- await self.receive_state_from_peer(coin_states, peer)
+ await self.add_states_from_peer(coin_states, peer)
elif item.item_type == NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION:
- self.log.debug("Pulled from queue: %s %s", item.item_type, item.data)
+ self.log.debug("Pulled from queue: %s %s", item.item_type.name, item.data)
puzzle_hashes: List[bytes32] = item.data
for peer in self.server.get_connections(NodeType.FULL_NODE):
# Puzzle hash subscription
- coin_states: List[CoinState] = await subscribe_to_phs(puzzle_hashes, peer, uint32(0))
+ coin_states = await subscribe_to_phs(puzzle_hashes, peer, uint32(0))
if len(coin_states) > 0:
async with self.wallet_state_manager.lock:
- await self.receive_state_from_peer(coin_states, peer)
+ await self.add_states_from_peer(coin_states, peer)
elif item.item_type == NewPeakQueueTypes.FULL_NODE_STATE_UPDATED:
# Note: this can take a while when we have a lot of transactions. We want to process these
# before new_peaks, since new_peak_wallet requires that we first obtain the state for that peak.
- self.log.debug("Pulled from queue: %s %s", item.item_type, item.data[0])
- request: wallet_protocol.CoinStateUpdate = item.data[0]
+ self.log.debug("Pulled from queue: %s %s", item.item_type.name, item.data[0])
+ coin_state_update = item.data[0]
peer = item.data[1]
assert peer is not None
- await self.state_update_received(request, peer)
+ await self.state_update_received(coin_state_update, peer)
elif item.item_type == NewPeakQueueTypes.NEW_PEAK_WALLET:
- self.log.debug("Pulled from queue: %s %s", item.item_type, item.data[0])
+ self.log.debug("Pulled from queue: %s %s", item.item_type.name, item.data[0])
# This can take a VERY long time, because it might trigger a long sync. It is OK if we miss some
# subscriptions or state updates, since all subscriptions and state updates will be handled by
# long_sync (up to the target height).
- request: wallet_protocol.NewPeakWallet = item.data[0]
+ new_peak = item.data[0]
peer = item.data[1]
assert peer is not None
- await self.new_peak_wallet(request, peer)
+ await self.new_peak_wallet(new_peak, peer)
else:
self.log.debug("Pulled from queue: UNKNOWN %s", item.item_type)
assert False
@@ -560,7 +579,7 @@ async def _process_new_subscriptions(self):
if peer is not None:
await peer.close(9999)
- def log_in(self, sk: PrivateKey):
+ def log_in(self, sk: PrivateKey) -> None:
self.logged_in_fingerprint = sk.get_g1().get_fingerprint()
self.logged_in = True
self.log.info(f"Wallet is logged in using key with fingerprint: {self.logged_in_fingerprint}")
@@ -569,7 +588,7 @@ def log_in(self, sk: PrivateKey):
except Exception:
self.log.exception("Non-fatal: Unable to update last used fingerprint.")
- def log_out(self):
+ def log_out(self) -> None:
self.logged_in_fingerprint = None
self.logged_in = False
@@ -596,11 +615,11 @@ def get_last_used_fingerprint_path(self) -> Path:
fingerprint_path = db_path.parent / "last_used_fingerprint"
return fingerprint_path
- def set_server(self, server: ChiaServer):
+ def set_server(self, server: ChiaServer) -> None:
self._server = server
self.initialize_wallet_peers()
- def initialize_wallet_peers(self):
+ def initialize_wallet_peers(self) -> None:
self.server.on_connect = self.on_connect
network_name = self.config["selected_network"]
@@ -627,7 +646,7 @@ def initialize_wallet_peers(self):
)
asyncio.create_task(self.wallet_peers.start())
- def on_disconnect(self, peer: WSChiaConnection):
+ def on_disconnect(self, peer: WSChiaConnection) -> None:
if self.is_trusted(peer):
self.local_node_synced = False
self.initialize_wallet_peers()
@@ -641,11 +660,11 @@ def on_disconnect(self, peer: WSChiaConnection):
self.wallet_state_manager.state_changed("close_connection")
- async def on_connect(self, peer: WSChiaConnection):
+ async def on_connect(self, peer: WSChiaConnection) -> None:
if self._wallet_state_manager is None:
return None
- if Version(peer.protocol_version) < Version("0.0.33"):
+ if peer.protocol_version < Version("0.0.33"):
self.log.info("Disconnecting, full node running old software")
await peer.close()
@@ -667,7 +686,7 @@ async def on_connect(self, peer: WSChiaConnection):
if self.wallet_peers is not None:
await self.wallet_peers.on_connect(peer)
- async def perform_atomic_rollback(self, fork_height: int, cache: Optional[PeerRequestCache] = None):
+ async def perform_atomic_rollback(self, fork_height: int, cache: Optional[PeerRequestCache] = None) -> None:
self.log.info(f"perform_atomic_rollback to {fork_height}")
# this is to start a write transaction
async with self.wallet_state_manager.db_wrapper.writer():
@@ -700,7 +719,7 @@ async def long_sync(
fork_height: int,
*,
rollback: bool,
- ):
+ ) -> None:
"""
Sync algorithm:
- Download and verify weight proof (if not trusted)
@@ -721,7 +740,7 @@ def is_new_state_update(cs: CoinState) -> bool:
return False
trusted: bool = self.is_trusted(full_node)
- self.log.info(f"Starting sync trusted: {trusted} to peer {full_node.peer_host}")
+ self.log.info(f"Starting sync trusted: {trusted} to peer {full_node.peer_info.host}")
start_time = time.time()
if rollback:
@@ -741,7 +760,7 @@ def is_new_state_update(cs: CoinState) -> bool:
for chunk in chunks(list(not_checked_puzzle_hashes), 1000):
ph_update_res: List[CoinState] = await subscribe_to_phs(chunk, full_node, 0)
ph_update_res = list(filter(is_new_state_update, ph_update_res))
- if not await self.receive_state_from_peer(ph_update_res, full_node):
+ if not await self.add_states_from_peer(ph_update_res, full_node):
# If something goes wrong, abort sync
return
already_checked_ph.update(not_checked_puzzle_hashes)
@@ -759,7 +778,7 @@ def is_new_state_update(cs: CoinState) -> bool:
for chunk in chunks(list(not_checked_coin_ids), 1000):
c_update_res: List[CoinState] = await subscribe_to_coin_updates(chunk, full_node, 0)
- if not await self.receive_state_from_peer(c_update_res, full_node):
+ if not await self.add_states_from_peer(c_update_res, full_node):
# If something goes wrong, abort sync
return
already_checked_coin_ids.update(not_checked_coin_ids)
@@ -778,7 +797,7 @@ def is_new_state_update(cs: CoinState) -> bool:
self.log.info(f"Sync (trusted: {trusted}) duration was: {time.time() - start_time}")
- async def receive_state_from_peer(
+ async def add_states_from_peer(
self,
items_input: List[CoinState],
peer: WSChiaConnection,
@@ -818,18 +837,18 @@ async def receive_state_from_peer(
cache.clear_after_height(fork_height)
self.log.info(f"clear_after_height {fork_height} for peer {peer}")
- all_tasks: List[asyncio.Task] = []
+ all_tasks: List[asyncio.Task[None]] = []
target_concurrent_tasks: int = 30
# Ensure the list is sorted
before = len(items_input)
- items = await self.wallet_state_manager.filter_spam(list(sorted(items_input, key=last_change_height_cs)))
+ items = await self.wallet_state_manager.filter_spam(sort_coin_states(items_input))
num_filtered = before - len(items)
if num_filtered > 0:
self.log.info(f"Filtered {num_filtered} spam transactions")
- async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: int):
+ async def validate_and_add(inner_states: List[CoinState], inner_idx_start: int) -> None:
try:
assert self.validation_semaphore is not None
async with self.validation_semaphore:
@@ -849,20 +868,11 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i
f"new coin state received ({inner_idx_start}-"
f"{inner_idx_start + len(inner_states) - 1}/ {len(items)})"
)
- try:
- await self.wallet_state_manager.add_coin_states(valid_states, peer, fork_height)
- except Exception as e:
- tb = traceback.format_exc()
- self.log.error(f"Exception while adding state: {e} {tb}")
- else:
- await self.wallet_state_manager.blockchain.clean_block_records()
-
+ await self.wallet_state_manager.add_coin_states(valid_states, peer, fork_height)
except Exception as e:
tb = traceback.format_exc()
- if self._shut_down:
- self.log.debug(f"Shutting down while adding state : {e} {tb}")
- else:
- self.log.error(f"Exception while adding state: {e} {tb}")
+ log_level = logging.DEBUG if peer.closed or self._shut_down else logging.ERROR
+ self.log.log(log_level, f"validate_and_add failed - exception: {e}, traceback: {tb}")
idx = 1
# Keep chunk size below 1000 just in case, windows has sqlite limits of 999 per query
@@ -874,21 +884,14 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i
await asyncio.gather(*all_tasks)
return False
if peer.peer_node_id not in self.server.all_connections:
- self.log.error(f"Disconnected from peer {peer.peer_node_id} host {peer.peer_host}")
+ self.log.error(f"Disconnected from peer {peer.peer_node_id} host {peer.peer_info.host}")
await asyncio.gather(*all_tasks)
return False
if trusted:
async with self.wallet_state_manager.db_wrapper.writer():
- try:
- self.log.info(f"new coin state received ({idx}-{idx + len(states) - 1}/ {len(items)})")
- await self.wallet_state_manager.add_coin_states(states, peer, fork_height)
- except Exception as e:
- tb = traceback.format_exc()
- self.log.error(f"Error adding states.. {e} {tb}")
+ self.log.info(f"new coin state received ({idx}-{idx + len(states) - 1}/ {len(items)})")
+ if not await self.wallet_state_manager.add_coin_states(states, peer, fork_height):
return False
- else:
- await self.wallet_state_manager.blockchain.clean_block_records()
-
else:
while len(all_tasks) >= target_concurrent_tasks:
all_tasks = [task for task in all_tasks if not task.done()]
@@ -897,7 +900,7 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i
self.log.info("Terminating receipt and validation due to shut down request")
await asyncio.gather(*all_tasks)
return False
- all_tasks.append(asyncio.create_task(receive_and_validate(states, idx)))
+ all_tasks.append(asyncio.create_task(validate_and_add(states, idx)))
idx += len(states)
still_connected = self._server is not None and peer.peer_node_id in self.server.all_connections
@@ -922,7 +925,7 @@ async def is_peer_synced(self, peer: WSChiaConnection, height: uint32) -> Option
return None
return latest_timestamp
- def is_trusted(self, peer) -> bool:
+ def is_trusted(self, peer: WSChiaConnection) -> bool:
return self.server.is_trusted_peer(peer, self.config.get("trusted_peers", {}))
def add_state_to_race_cache(self, header_hash: bytes32, height: uint32, coin_state: CoinState) -> None:
@@ -939,7 +942,7 @@ def add_state_to_race_cache(self, header_hash: bytes32, height: uint32, coin_sta
self.race_cache[header_hash] = set()
self.race_cache[header_hash].add(coin_state)
- async def state_update_received(self, request: wallet_protocol.CoinStateUpdate, peer: WSChiaConnection) -> None:
+ async def state_update_received(self, request: CoinStateUpdate, peer: WSChiaConnection) -> None:
# This gets called every time there is a new coin or puzzle hash change in the DB
# that is of interest to this wallet. It is not guaranteed to come for every height. This message is guaranteed
# to come before the corresponding new_peak for each height. We handle this differently for trusted and
@@ -948,7 +951,7 @@ async def state_update_received(self, request: wallet_protocol.CoinStateUpdate,
self.log.info(f"request coin: {coin.coin.name().hex()}{coin}")
async with self.wallet_state_manager.lock:
- await self.receive_state_from_peer(
+ await self.add_states_from_peer(
request.items,
peer,
request.fork_height,
@@ -956,13 +959,13 @@ async def state_update_received(self, request: wallet_protocol.CoinStateUpdate,
request.peak_hash,
)
- def get_full_node_peer(self) -> Optional[WSChiaConnection]:
+ def get_full_node_peer(self) -> WSChiaConnection:
"""
Get a full node, preferring synced & trusted > synced & untrusted > unsynced & trusted > unsynced & untrusted
"""
full_nodes: List[WSChiaConnection] = self.get_full_node_peers_in_order()
if len(full_nodes) == 0:
- return None
+ raise ValueError("No peer connected")
return full_nodes[0]
def get_full_node_peers_in_order(self) -> List[WSChiaConnection]:
@@ -1013,7 +1016,7 @@ async def get_timestamp_for_height(self, height: uint32) -> uint64:
raise PeerRequestException("Error fetching timestamp from all peers")
- async def new_peak_wallet(self, new_peak: wallet_protocol.NewPeakWallet, peer: WSChiaConnection):
+ async def new_peak_wallet(self, new_peak: NewPeakWallet, peer: WSChiaConnection) -> None:
if self._wallet_state_manager is None:
# When logging out of wallet
self.log.debug("state manager is None (shutdown)")
@@ -1025,7 +1028,7 @@ async def new_peak_wallet(self, new_peak: wallet_protocol.NewPeakWallet, peer: W
self.log.debug("skip block with lower weight.")
return
- request = wallet_protocol.RequestBlockHeader(new_peak.height)
+ request = RequestBlockHeader(new_peak.height)
response: Optional[RespondBlockHeader] = await peer.call_api(FullNodeAPI.request_block_header, request)
if response is None:
self.log.warning(f"Peer {peer.get_peer_info()} did not respond in time.")
@@ -1067,7 +1070,9 @@ async def new_peak_wallet(self, new_peak: wallet_protocol.NewPeakWallet, peer: W
async with self.wallet_state_manager.lock:
await self.wallet_state_manager.new_peak(new_peak)
- async def new_peak_from_trusted(self, new_peak_hb: HeaderBlock, latest_timestamp: uint64, peer: WSChiaConnection):
+ async def new_peak_from_trusted(
+ self, new_peak_hb: HeaderBlock, latest_timestamp: uint64, peer: WSChiaConnection
+ ) -> None:
async with self.wallet_state_manager.set_sync_mode(new_peak_hb.height) as current_height:
await self.wallet_state_manager.blockchain.set_peak_block(new_peak_hb, latest_timestamp)
# Sync to trusted node if we haven't done so yet. As long as we have synced once (and not
@@ -1111,37 +1116,30 @@ async def new_peak_from_untrusted(self, new_peak_hb: HeaderBlock, peer: WSChiaCo
return False
return True
- async def long_sync_from_untrusted(self, syncing: bool, new_peak_hb: HeaderBlock, peer: WSChiaConnection):
+ async def long_sync_from_untrusted(self, syncing: bool, new_peak_hb: HeaderBlock, peer: WSChiaConnection) -> None:
current_height: uint32 = await self.wallet_state_manager.blockchain.get_finished_sync_up_to()
- weight_proof, summaries, block_records = await self.fetch_and_validate_the_weight_proof(peer, new_peak_hb)
- old_proof = self.wallet_state_manager.blockchain.synced_weight_proof
- # In this case we will not rollback so it's OK to check some older updates as well, to ensure
- # that no recent transactions are being hidden.
- fork_point: int = 0
- if syncing:
- # This usually happens the first time we start up the wallet. We roll back slightly to be
- # safe, but we don't want to rollback too much (hence 16)
- fork_point = max(0, current_height - 16)
- if old_proof is not None:
- # If the weight proof fork point is in the past, rollback more to ensure we don't have duplicate
- # state.
- fork_point = min(fork_point, get_wp_fork_point(self.constants, old_proof, weight_proof))
-
- await self.wallet_state_manager.blockchain.new_valid_weight_proof(weight_proof, block_records)
+ fork_point_weight_proof = await self.fetch_and_update_weight_proof(peer, new_peak_hb)
+ # This usually happens the first time we start up the wallet. We roll back slightly to be
+ # safe, but we don't want to rollback too much (hence 16)
+ fork_point_rollback: int = max(0, current_height - 16)
+ # If the weight proof fork point is in the past, rollback more to ensure we don't have duplicate state.
+ fork_point_syncing = min(fork_point_rollback, fork_point_weight_proof)
if syncing:
async with self.wallet_state_manager.set_sync_mode(new_peak_hb.height):
- await self.long_sync(new_peak_hb.height, peer, fork_point, rollback=True)
+ await self.long_sync(new_peak_hb.height, peer, fork_point_syncing, rollback=True)
return
# we exit earlier in the case where syncing is False and a Secondary sync is running
assert self._secondary_peer_sync_task is None or self._secondary_peer_sync_task.done()
self.log.info("Secondary peer syncing")
+ # In this case we will not rollback so it's OK to check some older updates as well, to ensure
+ # that no recent transactions are being hidden.
self._secondary_peer_sync_task = asyncio.create_task(
- self.long_sync(new_peak_hb.height, peer, fork_point, rollback=False)
+ self.long_sync(new_peak_hb.height, peer, 0, rollback=False)
)
- async def sync_from_untrusted_close_to_peak(self, new_peak_hb, peer) -> bool:
+ async def sync_from_untrusted_close_to_peak(self, new_peak_hb: HeaderBlock, peer: WSChiaConnection) -> bool:
async with self.wallet_state_manager.lock:
peak_hb = await self.wallet_state_manager.blockchain.get_peak_block()
if peak_hb is None or new_peak_hb.weight > peak_hb.weight:
@@ -1158,7 +1156,7 @@ async def sync_from_untrusted_close_to_peak(self, new_peak_hb, peer) -> bool:
ph_updates: List[CoinState] = await subscribe_to_phs(phs, peer, uint32(0))
coin_updates: List[CoinState] = await subscribe_to_coin_updates(all_coin_ids, peer, uint32(0))
peer_new_peak_height, peer_new_peak_hash = self.node_peaks[peer.peer_node_id]
- success = await self.receive_state_from_peer(
+ success = await self.add_states_from_peer(
ph_updates + coin_updates,
peer,
height=peer_new_peak_height,
@@ -1176,7 +1174,7 @@ async def sync_from_untrusted_close_to_peak(self, new_peak_hb, peer) -> bool:
header_hash = self.wallet_state_manager.blockchain.height_to_hash(uint32(potential_height))
if header_hash in self.race_cache:
self.log.info(f"Receiving race state: {self.race_cache[header_hash]}")
- await self.receive_state_from_peer(list(self.race_cache[header_hash]), peer)
+ await self.add_states_from_peer(list(self.race_cache[header_hash]), peer)
self.wallet_state_manager.state_changed("new_block")
self.log.info(f"Finished processing new peak of {new_peak_hb.height}")
@@ -1194,7 +1192,7 @@ async def wallet_short_sync_backtrack(self, header_block: HeaderBlock, peer: WSC
fork_height = header_block.height - 1
while not self.wallet_state_manager.blockchain.contains_block(top.prev_header_hash) and top.height > 0:
- request_prev = wallet_protocol.RequestBlockHeader(uint32(top.height - 1))
+ request_prev = RequestBlockHeader(uint32(top.height - 1))
response_prev: Optional[RespondBlockHeader] = await peer.call_api(
FullNodeAPI.request_block_header, request_prev
)
@@ -1218,20 +1216,18 @@ async def wallet_short_sync_backtrack(self, header_block: HeaderBlock, peer: WSC
assert header_block.weight >= peak.weight
for block in blocks:
# Set blockchain to the latest peak
- res, err = await self.wallet_state_manager.blockchain.receive_block(block)
+ res, err = await self.wallet_state_manager.blockchain.add_block(block)
if res == AddBlockResult.INVALID_BLOCK:
raise ValueError(err)
return fork_height
- async def update_ui(self):
+ async def update_ui(self) -> None:
for wallet_id, wallet in self.wallet_state_manager.wallets.items():
self.wallet_state_manager.state_changed("coin_removed", wallet_id)
self.wallet_state_manager.state_changed("coin_added", wallet_id)
- async def fetch_and_validate_the_weight_proof(
- self, peer: WSChiaConnection, peak: HeaderBlock
- ) -> Tuple[WeightProof, List[SubEpochSummary], List[BlockRecord]]:
+ async def fetch_and_update_weight_proof(self, peer: WSChiaConnection, peak: HeaderBlock) -> int:
assert self._weight_proof_handler is not None
weight_request = RequestProofOfWeight(peak.height, peak.header_hash)
wp_timeout = self.config.get("weight_proof_timeout", 360)
@@ -1243,7 +1239,6 @@ async def fetch_and_validate_the_weight_proof(
if weight_proof_response is None:
raise Exception("weight proof response was none")
- start_validation = time.time()
weight_proof = weight_proof_response.wp
if weight_proof.recent_chain_data[-1].height != peak.height:
@@ -1253,24 +1248,12 @@ async def fetch_and_validate_the_weight_proof(
if weight_proof.recent_chain_data[-1].header_hash != peak.header_hash:
raise Exception("weight proof peak hash does not match peak")
- if weight_proof.get_hash() in self.valid_wp_cache:
- valid, fork_point, summaries, block_records = self.valid_wp_cache[weight_proof.get_hash()]
- else:
- old_proof = self.wallet_state_manager.blockchain.synced_weight_proof
- fork_point = get_wp_fork_point(self.constants, old_proof, weight_proof)
- start_validation = time.time()
- (
- valid,
- summaries,
- block_records,
- ) = await self._weight_proof_handler.validate_weight_proof(weight_proof, False, old_proof)
- if not valid:
- raise Exception("weight proof failed validation")
- self.valid_wp_cache[weight_proof.get_hash()] = valid, fork_point, summaries, block_records
-
- end_validation = time.time()
- self.log.info(f"It took {end_validation - start_validation} time to validate the weight proof")
- return weight_proof, summaries, block_records
+ old_proof = self.wallet_state_manager.blockchain.synced_weight_proof
+ block_records = await self._weight_proof_handler.validate_weight_proof(weight_proof, False, old_proof)
+
+ await self.wallet_state_manager.blockchain.new_valid_weight_proof(weight_proof, block_records)
+
+ return get_wp_fork_point(self.constants, old_proof, weight_proof)
async def get_puzzle_hashes_to_subscribe(self) -> List[bytes32]:
all_puzzle_hashes = await self.wallet_state_manager.puzzle_store.get_all_puzzle_hashes()
@@ -1297,6 +1280,8 @@ async def validate_received_state_from_peer(
Returns all state that is valid and included in the blockchain proved by the weight proof. If return_old_states
is False, only new states that are not in the coin_store are returned.
"""
+ if peer.closed:
+ return False
# Only use the cache if we are talking about states before the fork point. If we are evaluating something
# in a reorg, we cannot use the cache, since we don't know if it's actually in the new chain after the reorg.
if can_use_peer_request_cache(coin_state, peer_request_cache, fork_height):
@@ -1503,10 +1488,8 @@ async def validate_block_inclusion(
start, end, peer_request_cache, all_peers
)
if blocks is None:
- if self._shut_down:
- self.log.debug(f"Shutting down, block fetching from: {start} to {end} canceled.")
- else:
- self.log.error(f"Error fetching blocks {start} {end}")
+ log_level = logging.DEBUG if self._shut_down or peer.closed else logging.ERROR
+ self.log.log(log_level, f"Error fetching blocks {start} {end}")
return False
if compare_to_recent and weight_proof.recent_chain_data[0].header_hash != blocks[-1].header_hash:
@@ -1583,27 +1566,12 @@ async def validate_block_inclusion(
peer_request_cache.add_to_blocks_validated(reward_chain_hash, height)
return True
- async def fetch_puzzle_solution(self, height: uint32, coin: Coin, peer: WSChiaConnection) -> CoinSpend:
- solution_response = await peer.call_api(
- FullNodeAPI.request_puzzle_solution, wallet_protocol.RequestPuzzleSolution(coin.name(), height)
- )
- if solution_response is None or not isinstance(solution_response, wallet_protocol.RespondPuzzleSolution):
- raise PeerRequestException(f"Was not able to obtain solution {solution_response}")
- assert solution_response.response.puzzle.get_tree_hash() == coin.puzzle_hash
- assert solution_response.response.coin_name == coin.name()
-
- return CoinSpend(
- coin,
- solution_response.response.puzzle,
- solution_response.response.solution,
- )
-
async def get_coin_state(
self, coin_names: List[bytes32], peer: WSChiaConnection, fork_height: Optional[uint32] = None
) -> List[CoinState]:
- msg = wallet_protocol.RegisterForCoinUpdates(coin_names, uint32(0))
+ msg = RegisterForCoinUpdates(coin_names, uint32(0))
coin_state: Optional[RespondToCoinUpdates] = await peer.call_api(FullNodeAPI.register_interest_in_coin, msg)
- if coin_state is None or not isinstance(coin_state, wallet_protocol.RespondToCoinUpdates):
+ if coin_state is None or not isinstance(coin_state, RespondToCoinUpdates):
raise PeerRequestException(f"Was not able to get states for {coin_names}")
if not self.is_trusted(peer):
@@ -1621,10 +1589,10 @@ async def get_coin_state(
async def fetch_children(
self, coin_name: bytes32, peer: WSChiaConnection, fork_height: Optional[uint32] = None
) -> List[CoinState]:
- response: Optional[wallet_protocol.RespondChildren] = await peer.call_api(
- FullNodeAPI.request_children, wallet_protocol.RequestChildren(coin_name)
+ response: Optional[RespondChildren] = await peer.call_api(
+ FullNodeAPI.request_children, RequestChildren(coin_name)
)
- if response is None or not isinstance(response, wallet_protocol.RespondChildren):
+ if response is None or not isinstance(response, RespondChildren):
raise PeerRequestException(f"Was not able to obtain children {response}")
if not self.is_trusted(peer):
@@ -1638,11 +1606,39 @@ async def fetch_children(
return response.coin_states
# For RPC only. You should use wallet_state_manager.add_pending_transaction for normal wallet business.
- async def push_tx(self, spend_bundle):
- msg = make_msg(
- ProtocolMessageTypes.send_transaction,
- wallet_protocol.SendTransaction(spend_bundle),
- )
+ async def push_tx(self, spend_bundle: SpendBundle) -> None:
+ msg = make_msg(ProtocolMessageTypes.send_transaction, SendTransaction(spend_bundle))
full_nodes = self.server.get_connections(NodeType.FULL_NODE)
for peer in full_nodes:
await peer.send_message(msg)
+
+ async def _update_balance_cache(self, wallet_id: uint32) -> None:
+ assert self.wallet_state_manager.lock.locked(), "WalletStateManager.lock required"
+ wallet = self.wallet_state_manager.wallets[wallet_id]
+ unspent_records = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(wallet_id)
+ balance = await wallet.get_confirmed_balance(unspent_records)
+ pending_balance = await wallet.get_unconfirmed_balance(unspent_records)
+ spendable_balance = await wallet.get_spendable_balance(unspent_records)
+ pending_change = await wallet.get_pending_change_balance()
+ max_send_amount = await wallet.get_max_send_amount(unspent_records)
+
+ unconfirmed_removals: Dict[bytes32, Coin] = await wallet.wallet_state_manager.unconfirmed_removals_for_wallet(
+ wallet_id
+ )
+ self._balance_cache[wallet_id] = Balance(
+ confirmed_wallet_balance=balance,
+ unconfirmed_wallet_balance=pending_balance,
+ spendable_balance=spendable_balance,
+ pending_change=pending_change,
+ max_send_amount=max_send_amount,
+ unspent_coin_count=uint32(len(unspent_records)),
+ pending_coin_removal_count=uint32(len(unconfirmed_removals)),
+ )
+
+ async def get_balance(self, wallet_id: uint32) -> Balance:
+ self.log.debug(f"get_balance - wallet_id: {wallet_id}")
+ if not self.wallet_state_manager.sync_mode:
+ self.log.debug(f"get_balance - Updating cache for {wallet_id}")
+ async with self.wallet_state_manager.lock:
+ await self._update_balance_cache(wallet_id)
+ return self._balance_cache.get(wallet_id, Balance())
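The new _update_balance_cache / get_balance pair above assumes a Balance container with per-field defaults, so Balance() can be returned for a wallet that has never been cached. Below is a minimal sketch of such a container, inferred only from the keyword arguments used above; the field types, defaults, and import path are assumptions, not the actual definition.

# Illustrative sketch only -- not part of the patch. Field names mirror the
# keyword arguments passed in _update_balance_cache(); types and defaults are assumed.
from dataclasses import dataclass

from chia.util.ints import uint32, uint64, uint128


@dataclass(frozen=True)
class Balance:
    confirmed_wallet_balance: uint128 = uint128(0)
    unconfirmed_wallet_balance: uint128 = uint128(0)
    spendable_balance: uint128 = uint128(0)
    pending_change: uint64 = uint64(0)
    max_send_amount: uint128 = uint128(0)
    unspent_coin_count: uint32 = uint32(0)
    pending_coin_removal_count: uint32 = uint32(0)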
diff --git a/chia/wallet/wallet_pool_store.py b/chia/wallet/wallet_pool_store.py
--- a/chia/wallet/wallet_pool_store.py
+++ b/chia/wallet/wallet_pool_store.py
@@ -113,3 +113,8 @@ async def rollback(self, height: int, wallet_id_arg: int) -> None:
"DELETE FROM pool_state_transitions WHERE height>? AND wallet_id=?", (height, wallet_id_arg)
)
await cursor.close()
+
+ async def delete_wallet(self, wallet_id: uint32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute("DELETE FROM pool_state_transitions WHERE wallet_id=?", (wallet_id,))
+ await cursor.close()
diff --git a/chia/wallet/wallet_puzzle_store.py b/chia/wallet/wallet_puzzle_store.py
--- a/chia/wallet/wallet_puzzle_store.py
+++ b/chia/wallet/wallet_puzzle_store.py
@@ -2,7 +2,7 @@
import asyncio
import logging
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Dict, List, Optional, Set
from blspy import G1Element
@@ -11,7 +11,7 @@
from chia.util.ints import uint32
from chia.util.lru_cache import LRUCache
from chia.wallet.derivation_record import DerivationRecord
-from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.util.wallet_types import WalletIdentifier, WalletType
log = logging.getLogger(__name__)
@@ -25,7 +25,7 @@ class WalletPuzzleStore:
lock: asyncio.Lock
db_wrapper: DBWrapper2
- wallet_info_for_ph_cache: LRUCache
+ wallet_identifier_cache: LRUCache
# maps wallet_id -> last_derivation_index
last_wallet_derivation_index: Dict[uint32, uint32]
last_derivation_index: Optional[uint32]
@@ -64,7 +64,7 @@ async def create(cls, db_wrapper: DBWrapper2):
# the lock is locked by the users of this class
self.lock = asyncio.Lock()
- self.wallet_info_for_ph_cache = LRUCache(100)
+ self.wallet_identifier_cache = LRUCache(100)
self.last_derivation_index = None
self.last_wallet_derivation_index = {}
return self
@@ -267,12 +267,12 @@ async def index_for_puzzle_hash_and_wallet(self, puzzle_hash: bytes32, wallet_id
return None
- async def wallet_info_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tuple[int, WalletType]]:
+ async def get_wallet_identifier_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[WalletIdentifier]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
- cached = self.wallet_info_for_ph_cache.get(puzzle_hash)
+ cached = self.wallet_identifier_cache.get(puzzle_hash)
if cached is not None:
return cached
@@ -282,8 +282,9 @@ async def wallet_info_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tu
)
if row is not None:
- self.wallet_info_for_ph_cache.put(puzzle_hash, (row[1], WalletType(row[0])))
- return row[1], WalletType(row[0])
+ wallet_identifier = WalletIdentifier(uint32(row[1]), WalletType(row[0]))
+ self.wallet_identifier_cache.put(puzzle_hash, wallet_identifier)
+ return wallet_identifier
return None
@@ -357,3 +358,24 @@ async def get_unused_derivation_path(self) -> Optional[uint32]:
return uint32(row[0])
return None
+
+ async def delete_wallet(self, wallet_id: uint32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ # First fetch all puzzle hashes since we need them to drop them from the cache
+ rows = await conn.execute_fetchall(
+ "SELECT puzzle_hash FROM derivation_paths WHERE wallet_id=?", (wallet_id,)
+ )
+ cursor = await conn.execute("DELETE FROM derivation_paths WHERE wallet_id=?;", (wallet_id,))
+ await cursor.close()
+ # Clear caches
+ puzzle_hashes = set(bytes32.fromhex(row[0]) for row in rows)
+ for puzzle_hash in puzzle_hashes:
+ try:
+ self.wallet_identifier_cache.remove(puzzle_hash)
+ except KeyError:
+ pass
+ try:
+ self.last_wallet_derivation_index.pop(wallet_id)
+ except KeyError:
+ pass
+ self.last_derivation_index = None
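Several hunks in the wallet_state_manager.py diff below replace (wallet_id, wallet_type) tuples with a WalletIdentifier imported from chia.wallet.util.wallet_types, constructed both directly and via WalletIdentifier.create(wallet). The following is a sketch of what such a type could look like, inferred purely from its call sites in this patch; the create() signature and the frozen-dataclass layout are assumptions.

# Illustrative sketch only -- inferred from the call sites in this patch, not
# the actual definition in chia.wallet.util.wallet_types.
from dataclasses import dataclass
from typing import Any

from chia.util.ints import uint32
from chia.wallet.util.wallet_types import WalletType


@dataclass(frozen=True)
class WalletIdentifier:
    id: uint32
    type: WalletType

    @classmethod
    def create(cls, wallet: Any) -> "WalletIdentifier":
        # Every wallet exposes id() and type(); wrapping them once here lets
        # callers stop threading (wallet_id, wallet_type) tuples around.
        return cls(uint32(wallet.id()), WalletType(wallet.type()))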
diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py
--- a/chia/wallet/wallet_state_manager.py
+++ b/chia/wallet/wallet_state_manager.py
@@ -1,14 +1,27 @@
from __future__ import annotations
import asyncio
-import json
import logging
import multiprocessing.context
import time
+import traceback
from contextlib import asynccontextmanager
from pathlib import Path
from secrets import token_bytes
-from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Optional, Set, Tuple, Type, TypeVar
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+)
import aiosqlite
from blspy import G1Element, PrivateKey
@@ -20,8 +33,7 @@
from chia.data_layer.dl_wallet_store import DataLayerStore
from chia.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH, solution_to_pool_state
from chia.pools.pool_wallet import PoolWallet
-from chia.protocols import wallet_protocol
-from chia.protocols.wallet_protocol import CoinState
+from chia.protocols.wallet_protocol import CoinState, NewPeakWallet
from chia.rpc.rpc_server import StateChangedProtocol
from chia.server.outbound_message import NodeType
from chia.server.server import ChiaServer
@@ -52,11 +64,9 @@
master_sk_to_wallet_sk_unhardened,
master_sk_to_wallet_sk_unhardened_intermediate,
)
-from chia.wallet.did_wallet.did_info import DIDInfo
from chia.wallet.did_wallet.did_wallet import DIDWallet
from chia.wallet.did_wallet.did_wallet_puzzles import DID_INNERPUZ_MOD, match_did_puzzle
from chia.wallet.key_val_store import KeyValStore
-from chia.wallet.nft_wallet.nft_info import NFTWalletInfo
from chia.wallet.nft_wallet.nft_puzzles import get_metadata_and_phs, get_new_owner_did
from chia.wallet.nft_wallet.nft_wallet import NFTWallet
from chia.wallet.nft_wallet.uncurry_nft import UncurriedNFT
@@ -64,16 +74,24 @@
from chia.wallet.outer_puzzles import AssetType
from chia.wallet.puzzle_drivers import PuzzleInfo
from chia.wallet.puzzles.cat_loader import CAT_MOD, CAT_MOD_HASH
-from chia.wallet.singleton import create_fullpuz
+from chia.wallet.singleton import create_singleton_puzzle
from chia.wallet.trade_manager import TradeManager
from chia.wallet.trading.trade_status import TradeStatus
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.uncurried_puzzle import uncurry_puzzle
from chia.wallet.util.address_type import AddressType
from chia.wallet.util.compute_hints import compute_coin_hints
+from chia.wallet.util.query_filter import HashFilter
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_sync_utils import PeerRequestException, last_change_height_cs
-from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.util.wallet_sync_utils import (
+ PeerRequestException,
+ fetch_coin_spend_for_coin_state,
+ last_change_height_cs,
+)
+from chia.wallet.util.wallet_types import WalletIdentifier, WalletType
+from chia.wallet.vc_wallet.vc_drivers import VerifiedCredential
+from chia.wallet.vc_wallet.vc_store import VCStore
+from chia.wallet.vc_wallet.vc_wallet import VCWallet
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_blockchain import WalletBlockchain
from chia.wallet.wallet_coin_record import WalletCoinRecord
@@ -90,14 +108,21 @@
TWalletType = TypeVar("TWalletType", bound=WalletProtocol)
+if TYPE_CHECKING:
+ from chia.wallet.wallet_node import WalletNode
+
+
+PendingTxCallback = Callable[[], None]
+
class WalletStateManager:
constants: ConsensusConstants
- config: Dict
+ config: Dict[str, Any]
tx_store: WalletTransactionStore
puzzle_store: WalletPuzzleStore
user_store: WalletUserStore
nft_store: WalletNftStore
+ vc_store: VCStore
basic_store: KeyValStore
# Makes sure only one asyncio thread is changing the blockchain state at one time
@@ -109,7 +134,7 @@ class WalletStateManager:
_sync_target: Optional[uint32]
state_changed_callback: Optional[StateChangedProtocol] = None
- pending_tx_callback: Optional[Callable]
+ pending_tx_callback: Optional[PendingTxCallback]
db_path: Path
db_wrapper: DBWrapper2
@@ -126,7 +151,7 @@ class WalletStateManager:
multiprocessing_context: multiprocessing.context.BaseContext
server: ChiaServer
root_path: Path
- wallet_node: Any
+ wallet_node: WalletNode
pool_store: WalletPoolStore
dl_store: DataLayerStore
default_cats: Dict[str, Any]
@@ -136,20 +161,19 @@ class WalletStateManager:
@staticmethod
async def create(
private_key: PrivateKey,
- config: Dict,
+ config: Dict[str, Any],
db_path: Path,
constants: ConsensusConstants,
server: ChiaServer,
root_path: Path,
- wallet_node,
- name: str = None,
- ):
+ wallet_node: WalletNode,
+ ) -> WalletStateManager:
self = WalletStateManager()
self.config = config
self.constants = constants
self.server = server
self.root_path = root_path
- self.log = logging.getLogger(name if name else __name__)
+ self.log = logging.getLogger(__name__)
self.lock = asyncio.Lock()
self.log.debug(f"Starting in db path: {db_path}")
@@ -175,6 +199,7 @@ async def create(
self.puzzle_store = await WalletPuzzleStore.create(self.db_wrapper)
self.user_store = await WalletUserStore.create(self.db_wrapper)
self.nft_store = await WalletNftStore.create(self.db_wrapper)
+ self.vc_store = await VCStore.create(self.db_wrapper)
self.basic_store = await KeyValStore.create(self.db_wrapper)
self.trade_manager = await TradeManager.create(self, self.db_wrapper)
self.notification_manager = await NotificationManager.create(self, self.db_wrapper)
@@ -235,7 +260,9 @@ async def create(
wallet_info,
)
elif wallet_type == WalletType.DATA_LAYER:
- wallet = await DataLayerWallet.create(
+ wallet = await DataLayerWallet.create(self, wallet_info)
+ elif wallet_type == WalletType.VC: # pragma: no cover
+ wallet = await VCWallet.create(
self,
self.main_wallet,
wallet_info,
@@ -272,10 +299,10 @@ def get_wallet(self, id: uint32, required_type: Type[TWalletType]) -> TWalletTyp
async def create_more_puzzle_hashes(
self,
from_zero: bool = False,
- mark_existing_as_used=True,
+ mark_existing_as_used: bool = True,
up_to_index: Optional[uint32] = None,
num_additional_phs: Optional[int] = None,
- ):
+ ) -> None:
"""
For all wallets in the user store, generates the first few puzzle hashes so
that we can restore the wallet from only the private keys.
@@ -382,7 +409,7 @@ async def create_more_puzzle_hashes(
self.log.info(f"Updating last used derivation index: {unused - 1}")
await self.puzzle_store.set_used_up_to(uint32(unused - 1))
- async def update_wallet_puzzle_hashes(self, wallet_id):
+ async def update_wallet_puzzle_hashes(self, wallet_id: uint32) -> None:
derivation_paths: List[DerivationRecord] = []
target_wallet = self.wallets[wallet_id]
last: Optional[uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(wallet_id)
@@ -397,21 +424,21 @@ async def update_wallet_puzzle_hashes(self, wallet_id):
for index in range(unused, last):
# Since DID are not released yet we can assume they are only using unhardened keys derivation
pubkey: G1Element = self.get_public_key_unhardened(uint32(index))
- puzzlehash: Optional[bytes32] = target_wallet.puzzle_hash_for_pk(pubkey)
+ puzzlehash = target_wallet.puzzle_hash_for_pk(pubkey)
self.log.info(f"Generating public key at index {index} puzzle hash {puzzlehash.hex()}")
derivation_paths.append(
DerivationRecord(
uint32(index),
puzzlehash,
pubkey,
- target_wallet.wallet_info.type,
+ WalletType(target_wallet.wallet_info.type),
uint32(target_wallet.wallet_info.id),
False,
)
)
await self.puzzle_store.add_derivation_paths(derivation_paths)
- async def get_unused_derivation_record(self, wallet_id: uint32, *, hardened=False) -> DerivationRecord:
+ async def get_unused_derivation_record(self, wallet_id: uint32, *, hardened: bool = False) -> DerivationRecord:
"""
Creates a puzzle hash for the given wallet, and then makes more puzzle hashes
for every wallet to ensure we always have more in the database. Never reuse the
@@ -449,19 +476,21 @@ async def get_current_derivation_record_for_wallet(self, wallet_id: uint32) -> O
)
return current
- def set_callback(self, callback: Callable):
+ def set_callback(self, callback: StateChangedProtocol) -> None:
"""
Callback to be called when the state of the wallet changes.
"""
self.state_changed_callback = callback
- def set_pending_callback(self, callback: Callable):
+ def set_pending_callback(self, callback: PendingTxCallback) -> None:
"""
Callback to be called when new pending transaction enters the store
"""
self.pending_tx_callback = callback
- def state_changed(self, state: str, wallet_id: Optional[int] = None, data_object: Optional[Dict[str, Any]] = None):
+ def state_changed(
+ self, state: str, wallet_id: Optional[int] = None, data_object: Optional[Dict[str, Any]] = None
+ ) -> None:
"""
Calls the callback if it's present.
"""
@@ -517,9 +546,9 @@ async def set_sync_mode(self, target_height: uint32) -> AsyncIterator[uint32]:
if self.log.level == logging.DEBUG:
self.log.debug(f"set_sync_mode enter {await self.blockchain.get_finished_sync_up_to()}-{target_height}")
async with self.lock:
+ self._sync_target = target_height
start_time = time.time()
start_height = await self.blockchain.get_finished_sync_up_to()
- self._sync_target = target_height
self.log.info(f"set_sync_mode syncing - range: {start_height}-{target_height}")
self.state_changed("sync_changed")
try:
@@ -529,7 +558,6 @@ async def set_sync_mode(self, target_height: uint32) -> AsyncIterator[uint32]:
f"set_sync_mode failed - range: {start_height}-{target_height}, seconds: {time.time() - start_time}"
)
finally:
- self._sync_target = None
self.state_changed("sync_changed")
if self.log.level == logging.DEBUG:
self.log.debug(
@@ -537,8 +565,11 @@ async def set_sync_mode(self, target_height: uint32) -> AsyncIterator[uint32]:
f"get_finished_sync_up_to: {await self.blockchain.get_finished_sync_up_to()}, "
f"seconds: {time.time() - start_time}"
)
+ self._sync_target = None
- async def get_confirmed_spendable_balance_for_wallet(self, wallet_id: int, unspent_records=None) -> uint128:
+ async def get_confirmed_spendable_balance_for_wallet(
+ self, wallet_id: int, unspent_records: Optional[Set[WalletCoinRecord]] = None
+ ) -> uint128:
"""
Returns the balance amount of all coins that are spendable.
"""
@@ -555,16 +586,8 @@ async def does_coin_belong_to_wallet(self, coin: Coin, wallet_id: int) -> bool:
"""
Returns true if we have the key for this coin.
"""
- info = await self.puzzle_store.wallet_info_for_puzzle_hash(coin.puzzle_hash)
-
- if info is None:
- return False
-
- coin_wallet_id, wallet_type = info
- if wallet_id == coin_wallet_id:
- return True
-
- return False
+ wallet_identifier = await self.puzzle_store.get_wallet_identifier_for_puzzle_hash(coin.puzzle_hash)
+ return wallet_identifier is not None and wallet_identifier.id == wallet_id
async def get_confirmed_balance_for_wallet(
self,
@@ -619,27 +642,25 @@ async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> Dict[bytes32,
async def determine_coin_type(
self, peer: WSChiaConnection, coin_state: CoinState, fork_height: Optional[uint32]
- ) -> Tuple[Optional[uint32], Optional[WalletType]]:
+ ) -> Optional[WalletIdentifier]:
if coin_state.created_height is not None and (
self.is_pool_reward(uint32(coin_state.created_height), coin_state.coin)
or self.is_farmer_reward(uint32(coin_state.created_height), coin_state.coin)
):
- return None, None
+ return None
response: List[CoinState] = await self.wallet_node.get_coin_state(
[coin_state.coin.parent_coin_info], peer=peer, fork_height=fork_height
)
if len(response) == 0:
self.log.warning(f"Could not find a parent coin with ID: {coin_state.coin.parent_coin_info}")
- return None, None
+ return None
parent_coin_state = response[0]
assert parent_coin_state.spent_height == coin_state.created_height
- coin_spend: Optional[CoinSpend] = await self.wallet_node.fetch_puzzle_solution(
- parent_coin_state.spent_height, parent_coin_state.coin, peer
- )
+ coin_spend = await fetch_coin_spend_for_coin_state(parent_coin_state, peer)
if coin_spend is None:
- return None, None
+ return None
puzzle = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
@@ -662,9 +683,14 @@ async def determine_coin_type(
if did_curried_args is not None:
return await self.handle_did(did_curried_args, parent_coin_state, coin_state, coin_spend, peer)
+ # Check if the coin is a VC
+ is_vc, err_msg = VerifiedCredential.is_vc(uncurried)
+ if is_vc:
+ return await self.handle_vc(coin_spend)
+
await self.notification_manager.potentially_add_new_notification(coin_state, coin_spend)
- return None, None
+ return None
async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
xch_spam_amount = self.config.get("xch_spam_amount", 1000000)
@@ -697,12 +723,8 @@ async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
return filtered_cs
async def is_standard_wallet_tx(self, coin_state: CoinState) -> bool:
- wallet_info: Optional[Tuple[uint32, WalletType]] = await self.get_wallet_id_for_puzzle_hash(
- coin_state.coin.puzzle_hash
- )
- if wallet_info is not None and wallet_info[1] == WalletType.STANDARD_WALLET:
- return True
- return False
+ wallet_identifier = await self.get_wallet_identifier_for_puzzle_hash(coin_state.coin.puzzle_hash)
+ return wallet_identifier is not None and wallet_identifier.type == WalletType.STANDARD_WALLET
async def handle_cat(
self,
@@ -710,7 +732,7 @@ async def handle_cat(
parent_coin_state: CoinState,
coin_state: CoinState,
coin_spend: CoinSpend,
- ) -> Tuple[Optional[uint32], Optional[WalletType]]:
+ ) -> Optional[WalletIdentifier]:
"""
Handle the new coin when it is a CAT
:param curried_args: Curried arg of the CAT mod
@@ -719,8 +741,6 @@ async def handle_cat(
:param coin_spend: New coin spend
:return: Wallet ID & Wallet Type
"""
- wallet_id = None
- wallet_type = None
mod_hash, tail_hash, inner_puzzle = curried_args
hint_list = compute_coin_hints(coin_spend)
@@ -732,20 +752,20 @@ async def handle_cat(
if derivation_record is None:
self.log.info(f"Received state for the coin that doesn't belong to us {coin_state}")
+ return None
else:
our_inner_puzzle: Program = self.main_wallet.puzzle_for_pk(derivation_record.pubkey)
asset_id: bytes32 = bytes32(bytes(tail_hash)[1:])
cat_puzzle = construct_cat_puzzle(CAT_MOD, asset_id, our_inner_puzzle, CAT_MOD_HASH)
if cat_puzzle.get_tree_hash() != coin_state.coin.puzzle_hash:
- return None, None
+ return None
if bytes(tail_hash).hex()[2:] in self.default_cats or self.config.get(
"automatically_add_unknown_cats", False
):
cat_wallet = await CATWallet.get_or_create_wallet_for_cat(
self, self.main_wallet, bytes(tail_hash).hex()[2:]
)
- wallet_id = cat_wallet.id()
- wallet_type = cat_wallet.type()
+ return WalletIdentifier.create(cat_wallet)
else:
# Found unacknowledged CAT, save it in the database.
await self.interested_store.add_unacknowledged_token(
@@ -755,7 +775,7 @@ async def handle_cat(
parent_coin_state.coin.puzzle_hash,
)
self.state_changed("added_stray_cat")
- return wallet_id, wallet_type
+ return None
async def handle_did(
self,
@@ -764,7 +784,7 @@ async def handle_did(
coin_state: CoinState,
coin_spend: CoinSpend,
peer: WSChiaConnection,
- ) -> Tuple[Optional[uint32], Optional[WalletType]]:
+ ) -> Optional[WalletIdentifier]:
"""
Handle the new coin when it is a DID
:param curried_args: Curried arg of the DID mod
@@ -773,8 +793,6 @@ async def handle_did(
:param coin_spend: New coin spend
:return: Wallet ID & Wallet Type
"""
- wallet_id = None
- wallet_type = None
p2_puzzle, recovery_list_hash, num_verification, singleton_struct, metadata = curried_args
inner_puzzle_hash = p2_puzzle.get_tree_hash()
self.log.info(f"parent: {parent_coin_state.coin.name()} inner_puzzle_hash for parent is {inner_puzzle_hash}")
@@ -792,19 +810,21 @@ async def handle_did(
self.log.info(f"Received state for the coin that doesn't belong to us {coin_state}")
# Check if it was owned by us
removed_wallet_ids = []
- for wallet_info in await self.get_all_wallet_info_entries(wallet_type=WalletType.DECENTRALIZED_ID):
- did_info: DIDInfo = DIDInfo.from_json_dict(json.loads(wallet_info.data))
+ for wallet in self.wallets.values():
+ if not isinstance(wallet, DIDWallet):
+ continue
if (
- did_info.origin_coin is not None
- and launch_id == did_info.origin_coin.name()
- and not did_info.sent_recovery_transaction
+ wallet.did_info.origin_coin is not None
+ and launch_id == wallet.did_info.origin_coin.name()
+ and not wallet.did_info.sent_recovery_transaction
):
- await self.user_store.delete_wallet(wallet_info.id)
- removed_wallet_ids.append(wallet_info.id)
+ await self.user_store.delete_wallet(wallet.id())
+ removed_wallet_ids.append(wallet.id())
for remove_id in removed_wallet_ids:
self.wallets.pop(remove_id)
self.log.info(f"Removed DID wallet {remove_id}, Launch_ID: {launch_id.hex()}")
self.state_changed("wallet_removed", remove_id)
+ return None
else:
our_inner_puzzle: Program = self.main_wallet.puzzle_for_pk(derivation_record.pubkey)
@@ -812,23 +832,23 @@ async def handle_did(
did_puzzle = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, recovery_list_hash, num_verification, singleton_struct, metadata
)
- full_puzzle = create_fullpuz(did_puzzle, launch_id)
+ full_puzzle = create_singleton_puzzle(did_puzzle, launch_id)
did_puzzle_empty_recovery = DID_INNERPUZ_MOD.curry(
our_inner_puzzle, Program.to([]).get_tree_hash(), uint64(0), singleton_struct, metadata
)
- full_puzzle_empty_recovery = create_fullpuz(did_puzzle_empty_recovery, launch_id)
+ full_puzzle_empty_recovery = create_singleton_puzzle(did_puzzle_empty_recovery, launch_id)
if full_puzzle.get_tree_hash() != coin_state.coin.puzzle_hash:
if full_puzzle_empty_recovery.get_tree_hash() == coin_state.coin.puzzle_hash:
did_puzzle = did_puzzle_empty_recovery
self.log.info("DID recovery list was reset by the previous owner.")
else:
self.log.error("DID puzzle hash doesn't match, please check curried parameters.")
- return None, None
+ return None
# Create DID wallet
response: List[CoinState] = await self.wallet_node.get_coin_state([launch_id], peer=peer)
if len(response) == 0:
self.log.warning(f"Could not find the launch coin with ID: {launch_id}")
- return None, None
+ return None
launch_coin: CoinState = response[0]
origin_coin = launch_coin.coin
@@ -837,7 +857,7 @@ async def handle_did(
assert isinstance(wallet, DIDWallet)
assert wallet.did_info.origin_coin is not None
if origin_coin.name() == wallet.did_info.origin_coin.name():
- return wallet.id(), wallet.type()
+ return WalletIdentifier.create(wallet)
did_wallet = await DIDWallet.create_new_did_wallet_from_coin_spend(
self,
self.main_wallet,
@@ -846,17 +866,14 @@ async def handle_did(
coin_spend,
f"DID {encode_puzzle_hash(launch_id, AddressType.DID.hrp(self.config))}",
)
- wallet_id = did_wallet.id()
- wallet_type = did_wallet.type()
- self.state_changed("wallet_created", wallet_id, {"did_id": did_wallet.get_my_DID()})
- return wallet_id, wallet_type
+ wallet_identifier = WalletIdentifier.create(did_wallet)
+ self.state_changed("wallet_created", wallet_identifier.id, {"did_id": did_wallet.get_my_DID()})
+ return wallet_identifier
async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> Optional[bytes32]:
# Get minter DID
eve_coin = (await self.wallet_node.fetch_children(launcher_coin.name(), peer=peer))[0]
- eve_coin_spend: CoinSpend = await self.wallet_node.fetch_puzzle_solution(
- eve_coin.spent_height, eve_coin.coin, peer
- )
+ eve_coin_spend = await fetch_coin_spend_for_coin_state(eve_coin, peer)
eve_full_puzzle: Program = Program.from_bytes(bytes(eve_coin_spend.puzzle_reveal))
eve_uncurried_nft: Optional[UncurriedNFT] = UncurriedNFT.uncurry(*eve_full_puzzle.uncurry())
if eve_uncurried_nft is None:
@@ -880,9 +897,7 @@ async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> O
[launcher_parent[0].coin.parent_coin_info], peer=peer
)
assert did_coin is not None and len(did_coin) == 1 and did_coin[0].spent_height is not None
- did_spend: CoinSpend = await self.wallet_node.fetch_puzzle_solution(
- did_coin[0].spent_height, did_coin[0].coin, peer
- )
+ did_spend = await fetch_coin_spend_for_coin_state(did_coin[0], peer)
puzzle = Program.from_bytes(bytes(did_spend.puzzle_reveal))
uncurried = uncurry_puzzle(puzzle)
did_curried_args = match_did_puzzle(uncurried.mod, uncurried.args)
@@ -893,7 +908,7 @@ async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> O
async def handle_nft(
self, coin_spend: CoinSpend, uncurried_nft: UncurriedNFT, parent_coin_state: CoinState, coin_state: CoinState
- ) -> Tuple[Optional[uint32], Optional[WalletType]]:
+ ) -> Optional[WalletIdentifier]:
"""
Handle the new coin when it is a NFT
:param coin_spend: New coin spend
@@ -902,8 +917,7 @@ async def handle_nft(
:param coin_state: Current coin state
:return: Wallet ID & Wallet Type
"""
- wallet_id = None
- wallet_type = None
+ wallet_identifier = None
# DID ID determines which NFT wallet should process the NFT
new_did_id = None
old_did_id = None
@@ -939,42 +953,40 @@ async def handle_nft(
"Cannot find a P2 puzzle hash for NFT:%s, this NFT belongs to others.",
uncurried_nft.singleton_launcher_id.hex(),
)
- return wallet_id, wallet_type
- for wallet_info in await self.get_all_wallet_info_entries(wallet_type=WalletType.NFT):
- nft_wallet_info: NFTWalletInfo = NFTWalletInfo.from_json_dict(json.loads(wallet_info.data))
- if nft_wallet_info.did_id == old_did_id and old_derivation_record is not None:
+ return wallet_identifier
+ for nft_wallet in self.wallets.copy().values():
+ if not isinstance(nft_wallet, NFTWallet):
+ continue
+ if nft_wallet.nft_wallet_info.did_id == old_did_id and old_derivation_record is not None:
self.log.info(
"Removing old NFT, NFT_ID:%s, DID_ID:%s",
uncurried_nft.singleton_launcher_id.hex(),
old_did_id,
)
- nft_wallet = self.get_wallet(id=wallet_info.id, required_type=NFTWallet)
if parent_coin_state.spent_height is not None:
await nft_wallet.remove_coin(coin_spend.coin, uint32(parent_coin_state.spent_height))
is_empty = await nft_wallet.is_empty()
has_did = False
- for did_wallet_info in await self.get_all_wallet_info_entries(
- wallet_type=WalletType.DECENTRALIZED_ID
- ):
- did_wallet: DIDInfo = DIDInfo.from_json_dict(json.loads(did_wallet_info.data))
- assert did_wallet.origin_coin is not None
- if did_wallet.origin_coin.name() == old_did_id:
+ for did_wallet in self.wallets.values():
+ if not isinstance(did_wallet, DIDWallet):
+ continue
+ assert did_wallet.did_info.origin_coin is not None
+ if did_wallet.did_info.origin_coin.name() == old_did_id:
has_did = True
break
if is_empty and nft_wallet.did_id is not None and not has_did:
self.log.info(f"No NFT, deleting wallet {nft_wallet.did_id.hex()} ...")
await self.user_store.delete_wallet(nft_wallet.wallet_info.id)
self.wallets.pop(nft_wallet.wallet_info.id)
- if nft_wallet_info.did_id == new_did_id and new_derivation_record is not None:
+ if nft_wallet.nft_wallet_info.did_id == new_did_id and new_derivation_record is not None:
self.log.info(
"Adding new NFT, NFT_ID:%s, DID_ID:%s",
uncurried_nft.singleton_launcher_id.hex(),
new_did_id,
)
- wallet_id = wallet_info.id
- wallet_type = WalletType.NFT
+ wallet_identifier = WalletIdentifier.create(nft_wallet)
- if wallet_id is None and new_derivation_record is not None:
+ if wallet_identifier is None and new_derivation_record is not None:
# Cannot find an existing NFT wallet for the new NFT
self.log.info(
"Cannot find a NFT wallet for NFT_ID: %s DID_ID: %s, creating a new one.",
@@ -984,11 +996,29 @@ async def handle_nft(
new_nft_wallet: NFTWallet = await NFTWallet.create_new_nft_wallet(
self, self.main_wallet, did_id=new_did_id, name="NFT Wallet"
)
- wallet_id = uint32(new_nft_wallet.wallet_id)
- wallet_type = WalletType.NFT
- return wallet_id, wallet_type
+ wallet_identifier = WalletIdentifier.create(new_nft_wallet)
+ return wallet_identifier
+
+ async def handle_vc(self, parent_coin_spend: CoinSpend) -> Optional[WalletIdentifier]:
+ # Check the ownership
+ vc: VerifiedCredential = VerifiedCredential.get_next_from_coin_spend(parent_coin_spend)
+ derivation_record: Optional[DerivationRecord] = await self.puzzle_store.get_derivation_record_for_puzzle_hash(
+ vc.inner_puzzle_hash
+ )
+ if derivation_record is None:
+ self.log.warning(
+ f"Verified credential {vc.launcher_id.hex()} is not belong to the current wallet."
+ ) # pragma: no cover
+ return None # pragma: no cover
+ self.log.info(f"Found verified credential {vc.launcher_id.hex()}.")
+ for wallet_info in await self.get_all_wallet_info_entries(wallet_type=WalletType.VC):
+ return WalletIdentifier(wallet_info.id, WalletType.VC)
+ else:
+ # Create a new VC wallet
+ vc_wallet = await VCWallet.create_new_vc_wallet(self, self.main_wallet) # pragma: no cover
+ return WalletIdentifier(vc_wallet.id(), WalletType.VC) # pragma: no cover
- async def add_coin_states(
+ async def _add_coin_states(
self,
coin_states: List[CoinState],
peer: WSChiaConnection,
@@ -1006,14 +1036,16 @@ async def add_coin_states(
trade_removals = await self.trade_manager.get_coins_of_interest()
all_unconfirmed: List[TransactionRecord] = await self.tx_store.get_all_unconfirmed()
used_up_to = -1
- ph_to_index_cache: LRUCache = LRUCache(100)
+ ph_to_index_cache: LRUCache[bytes32, uint32] = LRUCache(100)
coin_names = [coin_state.coin.name() for coin_state in coin_states]
- local_records = await self.coin_store.get_coin_records(coin_names)
+ local_records = await self.coin_store.get_coin_records(coin_id_filter=HashFilter.include(coin_names))
for coin_name, coin_state in zip(coin_names, coin_states):
+ if peer.closed:
+ raise ConnectionError("Connection closed")
self.log.debug("Add coin state: %s: %s", coin_name, coin_state)
- local_record = local_records.get(coin_name)
+ local_record = local_records.coin_id_to_record.get(coin_name)
rollback_wallets = None
try:
async with self.db_wrapper.writer():
@@ -1021,9 +1053,7 @@ async def add_coin_states(
# This only succeeds if we don't raise out of the transaction
await self.retry_store.remove_state(coin_state)
- wallet_info: Optional[Tuple[uint32, WalletType]] = await self.get_wallet_id_for_puzzle_hash(
- coin_state.coin.puzzle_hash
- )
+ wallet_identifier = await self.get_wallet_identifier_for_puzzle_hash(coin_state.coin.puzzle_hash)
# If we already have this coin, & it was spent & confirmed at the same heights, then return (done)
if local_record is not None:
@@ -1038,25 +1068,24 @@ async def add_coin_states(
if coin_state.spent_height is not None and coin_name in trade_removals:
await self.trade_manager.coins_of_interest_farmed(coin_state, fork_height, peer)
- wallet_id: Optional[uint32] = None
- wallet_type: Optional[WalletType] = None
- if wallet_info is not None:
- wallet_id, wallet_type = wallet_info
+ if wallet_identifier is not None:
+ self.log.debug(f"Found existing wallet_identifier: {wallet_identifier}, coin: {coin_name}")
elif local_record is not None:
- wallet_id = uint32(local_record.wallet_id)
- wallet_type = local_record.wallet_type
+ wallet_identifier = WalletIdentifier(uint32(local_record.wallet_id), local_record.wallet_type)
elif coin_state.created_height is not None:
- wallet_id, wallet_type = await self.determine_coin_type(peer, coin_state, fork_height)
- potential_dl = self.get_dl_wallet()
- if potential_dl is not None:
+ wallet_identifier = await self.determine_coin_type(peer, coin_state, fork_height)
+ try:
+ dl_wallet = self.get_dl_wallet()
+ except ValueError:
+ pass
+ else:
if (
- await potential_dl.get_singleton_record(coin_name) is not None
+ await dl_wallet.get_singleton_record(coin_name) is not None
or coin_state.coin.puzzle_hash == MIRROR_PUZZLE_HASH
):
- wallet_id = potential_dl.id()
- wallet_type = potential_dl.type()
+ wallet_identifier = WalletIdentifier.create(dl_wallet)
- if wallet_id is None or wallet_type is None:
+ if wallet_identifier is None:
self.log.debug(f"No wallet for coin state: {coin_state}")
continue
@@ -1081,8 +1110,8 @@ async def add_coin_states(
coin_state.coin,
uint32(coin_state.created_height),
all_unconfirmed,
- wallet_id,
- wallet_type,
+ wallet_identifier.id,
+ wallet_identifier.type,
peer,
coin_name,
)
@@ -1110,28 +1139,33 @@ async def add_coin_states(
uint32(coin_state.spent_height),
True,
farmer_reward or pool_reward,
- wallet_type,
- wallet_id,
+ wallet_identifier.type,
+ wallet_identifier.id,
)
await self.coin_store.add_coin_record(record)
# Coin first received
parent_coin_record: Optional[WalletCoinRecord] = await self.coin_store.get_coin_record(
coin_state.coin.parent_coin_info
)
- if parent_coin_record is not None and wallet_type.value == parent_coin_record.wallet_type:
+ if (
+ parent_coin_record is not None
+ and wallet_identifier.type == parent_coin_record.wallet_type
+ ):
change = True
else:
change = False
if not change:
created_timestamp = await self.wallet_node.get_timestamp_for_height(
- coin_state.created_height
+ uint32(coin_state.created_height)
)
tx_record = TransactionRecord(
confirmed_at_height=uint32(coin_state.created_height),
created_at_time=uint64(created_timestamp),
to_puzzle_hash=(
- await self.convert_puzzle_hash(wallet_id, coin_state.coin.puzzle_hash)
+ await self.convert_puzzle_hash(
+ wallet_identifier.id, coin_state.coin.puzzle_hash
+ )
),
amount=uint64(coin_state.coin.amount),
fee_amount=uint64(0),
@@ -1140,7 +1174,7 @@ async def add_coin_states(
spend_bundle=None,
additions=[coin_state.coin],
removals=[],
- wallet_id=wallet_id,
+ wallet_id=wallet_identifier.id,
sent_to=[],
trade_id=None,
type=uint32(tx_type),
@@ -1160,15 +1194,18 @@ async def add_coin_states(
derivation_record = await self.puzzle_store.get_derivation_record_for_puzzle_hash(
coin.puzzle_hash
)
- if derivation_record is None:
+ if derivation_record is None: # not change
to_puzzle_hash = coin.puzzle_hash
amount += coin.amount
+ elif wallet_identifier.type == WalletType.CAT:
+ # We subscribe to change for CATs since they didn't hint previously
+ await self.add_interested_coin_ids([coin.name()])
if to_puzzle_hash is None:
to_puzzle_hash = additions[0].puzzle_hash
spent_timestamp = await self.wallet_node.get_timestamp_for_height(
- coin_state.spent_height
+ uint32(coin_state.spent_height)
)
# Reorg rollback adds reorged transactions so it's possible there is tx_record already
@@ -1188,7 +1225,9 @@ async def add_coin_states(
tx_record = TransactionRecord(
confirmed_at_height=uint32(coin_state.spent_height),
created_at_time=uint64(spent_timestamp),
- to_puzzle_hash=(await self.convert_puzzle_hash(wallet_id, to_puzzle_hash)),
+ to_puzzle_hash=(
+ await self.convert_puzzle_hash(wallet_identifier.id, to_puzzle_hash)
+ ),
amount=uint64(int(amount)),
fee_amount=uint64(fee),
confirmed=True,
@@ -1196,7 +1235,7 @@ async def add_coin_states(
spend_bundle=None,
additions=additions,
removals=[coin_state.coin],
- wallet_id=wallet_id,
+ wallet_id=wallet_identifier.id,
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
@@ -1229,9 +1268,7 @@ async def add_coin_states(
curr_coin_state: CoinState = coin_state
while curr_coin_state.spent_height is not None:
- cs: CoinSpend = await self.wallet_node.fetch_puzzle_solution(
- curr_coin_state.spent_height, curr_coin_state.coin, peer
- )
+ cs = await fetch_coin_spend_for_coin_state(curr_coin_state, peer)
success = await pool_wallet.apply_state_transition(
cs, uint32(curr_coin_state.spent_height)
)
@@ -1264,9 +1301,7 @@ async def add_coin_states(
assert len(new_coin_state) == 1
curr_coin_state = new_coin_state[0]
if record.wallet_type == WalletType.DATA_LAYER:
- singleton_spend = await self.wallet_node.fetch_puzzle_solution(
- coin_state.spent_height, coin_state.coin, peer
- )
+ singleton_spend = await fetch_coin_spend_for_coin_state(coin_state, peer)
dl_wallet = self.get_wallet(id=uint32(record.wallet_id), required_type=DataLayerWallet)
await dl_wallet.singleton_removed(
singleton_spend,
@@ -1277,6 +1312,10 @@ async def add_coin_states(
if coin_state.spent_height is not None:
nft_wallet = self.get_wallet(id=uint32(record.wallet_id), required_type=NFTWallet)
await nft_wallet.remove_coin(coin_state.coin, uint32(coin_state.spent_height))
+ elif record.wallet_type == WalletType.VC:
+ if coin_state.spent_height is not None:
+ vc_wallet = self.get_wallet(id=uint32(record.wallet_id), required_type=VCWallet)
+ await vc_wallet.remove_coin(coin_state.coin, uint32(coin_state.spent_height))
# Check if a child is a singleton launcher
for child in children:
@@ -1287,9 +1326,7 @@ async def add_coin_states(
if child.spent_height is None:
# TODO handle spending launcher later block
continue
- launcher_spend: Optional[CoinSpend] = await self.wallet_node.fetch_puzzle_solution(
- child.spent_height, child.coin, peer
- )
+ launcher_spend = await fetch_coin_spend_for_coin_state(child, peer)
if launcher_spend is None:
continue
try:
@@ -1303,15 +1340,11 @@ async def add_coin_states(
and inner_puzhash is not None
and (await self.puzzle_store.puzzle_hash_exists(inner_puzhash))
):
- for _, wallet in self.wallets.items():
- if wallet.type() == WalletType.DATA_LAYER.value:
- assert isinstance(wallet, DataLayerWallet)
- dl_wallet = wallet
- break
- else: # No DL wallet exists yet
+ try:
+ dl_wallet = self.get_dl_wallet()
+ except ValueError:
dl_wallet = await DataLayerWallet.create_new_dl_wallet(
self,
- self.main_wallet,
)
await dl_wallet.track_new_launcher_id(
child.coin.name(),
@@ -1363,6 +1396,23 @@ async def add_coin_states(
await self.retry_store.remove_state(coin_state)
continue
+ async def add_coin_states(
+ self,
+ coin_states: List[CoinState],
+ peer: WSChiaConnection,
+ fork_height: Optional[uint32],
+ ) -> bool:
+ try:
+ await self._add_coin_states(coin_states, peer, fork_height)
+ except Exception as e:
+ log_level = logging.DEBUG if peer.closed else logging.ERROR
+ self.log.log(log_level, f"add_coin_states failed - exception {e}, traceback: {traceback.format_exc()}")
+ return False
+
+ await self.blockchain.clean_block_records()
+
+ return True
+
async def have_a_pool_wallet_with_launched_id(self, launcher_id: bytes32) -> bool:
for wallet_id, wallet in self.wallets.items():
if wallet.type() == WalletType.POOLING_WALLET:
@@ -1400,11 +1450,10 @@ def is_farmer_reward(self, created_height: uint32, coin: Coin) -> bool:
return True
return False
- async def get_wallet_id_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tuple[uint32, WalletType]]:
- info = await self.puzzle_store.wallet_info_for_puzzle_hash(puzzle_hash)
- if info is not None:
- wallet_id, wallet_type = info
- return uint32(wallet_id), wallet_type
+ async def get_wallet_identifier_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[WalletIdentifier]:
+ wallet_identifier = await self.puzzle_store.get_wallet_identifier_for_puzzle_hash(puzzle_hash)
+ if wallet_identifier is not None:
+ return wallet_identifier
interested_wallet_id = await self.interested_store.get_interested_puzzle_hash_wallet_id(puzzle_hash=puzzle_hash)
if interested_wallet_id is not None:
@@ -1412,7 +1461,7 @@ async def get_wallet_id_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[
if wallet_id not in self.wallets.keys():
self.log.warning(f"Do not have wallet {wallet_id} for puzzle_hash {puzzle_hash}")
return None
- return uint32(wallet_id), self.wallets[uint32(wallet_id)].type()
+ return WalletIdentifier(uint32(wallet_id), self.wallets[uint32(wallet_id)].type())
return None
async def coin_added(
@@ -1478,6 +1527,7 @@ async def coin_added(
if tx_record.amount > 0:
await self.tx_store.add_transaction_record(tx_record)
+ # We only add normal coins here
coin_record: WalletCoinRecord = WalletCoinRecord(
coin, height, uint32(0), False, coinbase, wallet_type, wallet_id
)
@@ -1487,7 +1537,7 @@ async def coin_added(
await self.create_more_puzzle_hashes()
- async def add_pending_transaction(self, tx_record: TransactionRecord):
+ async def add_pending_transaction(self, tx_record: TransactionRecord) -> None:
"""
Called from wallet before new transaction is sent to the full_node
"""
@@ -1502,7 +1552,7 @@ async def add_pending_transaction(self, tx_record: TransactionRecord):
self.tx_pending_changed()
self.state_changed("pending_transaction", tx_record.wallet_id)
- async def add_transaction(self, tx_record: TransactionRecord):
+ async def add_transaction(self, tx_record: TransactionRecord) -> None:
"""
Called from wallet to add a transaction that is not being sent to the full_node
"""
@@ -1515,7 +1565,7 @@ async def remove_from_queue(
name: str,
send_status: MempoolInclusionStatus,
error: Optional[Err],
- ):
+ ) -> None:
"""
Full node received our transaction, no need to keep it in queue anymore, unless there was an error
"""
@@ -1579,20 +1629,13 @@ async def get_all_transactions(self, wallet_id: int) -> List[TransactionRecord]:
async def get_transaction(self, tx_id: bytes32) -> Optional[TransactionRecord]:
return await self.tx_store.get_transaction_record(tx_id)
- async def get_transaction_by_wallet_record(self, wr: WalletCoinRecord) -> Optional[TransactionRecord]:
- records = await self.tx_store.get_transactions_by_height(wr.confirmed_block_height)
- for record in records:
- if wr.coin in record.additions or record.removals:
- return record
- return None
-
async def get_coin_record_by_wallet_record(self, wr: WalletCoinRecord) -> CoinRecord:
timestamp: uint64 = await self.wallet_node.get_timestamp_for_height(wr.confirmed_block_height)
return wr.to_coin_record(timestamp)
- async def get_coin_records_by_coin_ids(self, **kwargs) -> List[CoinRecord]:
- records = await self.coin_store.get_coin_records(**kwargs)
- return [await self.get_coin_record_by_wallet_record(record) for record in records.values()]
+ async def get_coin_records_by_coin_ids(self, **kwargs: Any) -> List[CoinRecord]:
+ result = await self.coin_store.get_coin_records(**kwargs)
+ return [await self.get_coin_record_by_wallet_record(record) for record in result.records]
async def get_wallet_for_coin(self, coin_id: bytes32) -> Optional[WalletProtocol]:
coin_record = await self.coin_store.get_coin_record(coin_id)
@@ -1612,7 +1655,7 @@ async def reorg_rollback(self, height: int) -> List[uint32]:
reorged: List[TransactionRecord] = await self.tx_store.get_transaction_above(height)
await self.tx_store.rollback_to_block(height)
for record in reorged:
- if record.type in [
+ if TransactionType(record.type) in [
TransactionType.OUTGOING_TX,
TransactionType.OUTGOING_TRADE,
TransactionType.INCOMING_TRADE,
@@ -1642,7 +1685,7 @@ def unlink_db(self) -> None:
async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> List[WalletInfo]:
return await self.user_store.get_all_wallet_info_entries(wallet_type)
- async def get_wallet_for_asset_id(self, asset_id: str):
+ async def get_wallet_for_asset_id(self, asset_id: str) -> Optional[WalletProtocol]:
for wallet_id, wallet in self.wallets.items():
if wallet.type() == WalletType.CAT:
assert isinstance(wallet, CATWallet)
@@ -1659,7 +1702,7 @@ async def get_wallet_for_asset_id(self, asset_id: str):
return wallet
return None
- async def get_wallet_for_puzzle_info(self, puzzle_driver: PuzzleInfo):
+ async def get_wallet_for_puzzle_info(self, puzzle_driver: PuzzleInfo) -> Optional[WalletProtocol]:
for wallet in self.wallets.values():
match_function = getattr(wallet, "match_puzzle_info", None)
if match_function is not None and callable(match_function):
@@ -1667,7 +1710,7 @@ async def get_wallet_for_puzzle_info(self, puzzle_driver: PuzzleInfo):
return wallet
return None
- async def create_wallet_for_puzzle_info(self, puzzle_driver: PuzzleInfo, name=None):
+ async def create_wallet_for_puzzle_info(self, puzzle_driver: PuzzleInfo, name: Optional[str] = None) -> None:
if AssetType(puzzle_driver.type()) in self.asset_to_wallet_map:
await self.asset_to_wallet_map[AssetType(puzzle_driver.type())].create_from_puzzle_info(
self,
@@ -1709,7 +1752,7 @@ async def get_spendable_coins_for_wallet(
return filtered
- async def new_peak(self, peak: wallet_protocol.NewPeakWallet):
+ async def new_peak(self, peak: NewPeakWallet) -> None:
for wallet_id, wallet in self.wallets.items():
if wallet.type() == WalletType.POOLING_WALLET:
assert isinstance(wallet, PoolWallet)
@@ -1731,7 +1774,7 @@ async def add_interested_coin_ids(self, coin_ids: List[bytes32]) -> None:
if len(coin_ids) > 0:
await self.wallet_node.new_peak_queue.subscribe_to_coin_ids(coin_ids)
- async def delete_trade_transactions(self, trade_id: bytes32):
+ async def delete_trade_transactions(self, trade_id: bytes32) -> None:
txs: List[TransactionRecord] = await self.tx_store.get_transactions_by_trade_id(trade_id)
for tx in txs:
await self.tx_store.delete_transaction_record(tx.name)
@@ -1745,9 +1788,23 @@ async def convert_puzzle_hash(self, wallet_id: uint32, puzzle_hash: bytes32) ->
return puzzle_hash
- def get_dl_wallet(self) -> Optional[DataLayerWallet]:
- for _, wallet in self.wallets.items():
+ def get_dl_wallet(self) -> DataLayerWallet:
+ for wallet in self.wallets.values():
if wallet.type() == WalletType.DATA_LAYER.value:
- assert isinstance(wallet, DataLayerWallet)
+ assert isinstance(
+ wallet, DataLayerWallet
+ ), f"WalletType.DATA_LAYER should be a DataLayerWallet instance got: {type(wallet).__name__}"
return wallet
- return None
+ raise ValueError("DataLayerWallet not available")
+
+ async def get_or_create_vc_wallet(self) -> VCWallet:
+ for _, wallet in self.wallets.items():
+ if WalletType(wallet.type()) == WalletType.VC:
+ assert isinstance(wallet, VCWallet)
+ vc_wallet: VCWallet = wallet
+ break
+ else:
+ # Create a new VC wallet
+ vc_wallet = await VCWallet.create_new_vc_wallet(self, self.main_wallet)
+
+ return vc_wallet
diff --git a/chia/wallet/wallet_transaction_store.py b/chia/wallet/wallet_transaction_store.py
--- a/chia/wallet/wallet_transaction_store.py
+++ b/chia/wallet/wallet_transaction_store.py
@@ -12,6 +12,7 @@
from chia.util.ints import uint8, uint32
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.transaction_sorting import SortKey
+from chia.wallet.util.query_filter import FilterMode, TransactionTypeFilter
from chia.wallet.util.transaction_type import TransactionType
log = logging.getLogger(__name__)
@@ -186,19 +187,6 @@ async def get_transaction_record(self, tx_id: bytes32) -> Optional[TransactionRe
return TransactionRecord.from_bytes(rows[0][0])
return None
- async def get_transactions_by_height(self, height: uint32) -> List[TransactionRecord]:
- """
- Checks DB and cache for TransactionRecord with id: id and returns it.
- """
- async with self.db_wrapper.reader_no_transaction() as conn:
- # NOTE: bundle_id is being stored as bytes, not hex
- rows = list(
- await conn.execute_fetchall(
- "SELECT transaction_record from transaction_record WHERE confirmed_at_height=?", (height,)
- )
- )
- return [TransactionRecord.from_bytes(row[0]) for row in rows]
-
# TODO: This should probably be split into separate function, one that
# queries the state and one that updates it. Also, include_accepted_txs=True
# might be a separate function too.
@@ -269,7 +257,14 @@ async def get_unconfirmed_for_wallet(self, wallet_id: int) -> List[TransactionRe
return [TransactionRecord.from_bytes(row[0]) for row in rows]
async def get_transactions_between(
- self, wallet_id: int, start, end, sort_key=None, reverse=False, to_puzzle_hash: Optional[bytes32] = None
+ self,
+ wallet_id: int,
+ start,
+ end,
+ sort_key=None,
+ reverse=False,
+ to_puzzle_hash: Optional[bytes32] = None,
+ type_filter: Optional[TransactionTypeFilter] = None,
) -> List[TransactionRecord]:
"""Return a list of transaction between start and end index. List is in reverse chronological order.
start = 0 is most recent transaction
@@ -291,10 +286,18 @@ async def get_transactions_between(
else:
query_str = SortKey[sort_key].ascending()
+ if type_filter is None:
+ type_filter_str = ""
+ else:
+ type_filter_str = (
+ f"AND type {'' if type_filter.mode == FilterMode.include else 'NOT'} "
+ f"IN ({','.join([str(x) for x in type_filter.values])})"
+ )
+
async with self.db_wrapper.reader_no_transaction() as conn:
rows = await conn.execute_fetchall(
f"SELECT transaction_record FROM transaction_record WHERE wallet_id=?{puzz_hash_where}"
- f" {query_str}, rowid"
+ f" {type_filter_str} {query_str}, rowid"
f" LIMIT {start}, {limit}",
(wallet_id,),
)
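For reference, a small sketch of the SQL fragment that type_filter_str above expands to; the numeric values are placeholders, not a claim about the real TransactionType members:

# Illustrative only: what the include/exclude branches of type_filter_str produce.
values = [0, 1]
include_fragment = f"AND type  IN ({','.join(str(v) for v in values)})"
exclude_fragment = f"AND type NOT IN ({','.join(str(v) for v in values)})"
print(include_fragment)  # AND type  IN (0,1)
print(exclude_fragment)  # AND type NOT IN (0,1)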
diff --git a/chia/wallet/wallet_weight_proof_handler.py b/chia/wallet/wallet_weight_proof_handler.py
--- a/chia/wallet/wallet_weight_proof_handler.py
+++ b/chia/wallet/wallet_weight_proof_handler.py
@@ -3,14 +3,14 @@
import asyncio
import logging
import tempfile
+import time
from concurrent.futures.process import ProcessPoolExecutor
from multiprocessing.context import BaseContext
-from typing import IO, List, Optional, Tuple
+from typing import IO, List, Optional
from chia.consensus.block_record import BlockRecord
from chia.consensus.constants import ConsensusConstants
from chia.full_node.weight_proof import _validate_sub_epoch_summaries, validate_weight_proof_inner
-from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.weight_proof import WeightProof
from chia.util.ints import uint32
from chia.util.setproctitle import getproctitle, setproctitle
@@ -37,42 +37,35 @@ def __init__(
initializer=setproctitle,
initargs=(f"{getproctitle()}_worker",),
)
- self._weight_proof_tasks: List[asyncio.Task[Tuple[bool, List[BlockRecord]]]] = []
def cancel_weight_proof_tasks(self) -> None:
- for task in self._weight_proof_tasks:
- if not task.done():
- task.cancel()
- self._weight_proof_tasks = []
self._executor_shutdown_tempfile.close()
self._executor.shutdown(wait=True)
async def validate_weight_proof(
self, weight_proof: WeightProof, skip_segment_validation: bool = False, old_proof: Optional[WeightProof] = None
- ) -> Tuple[bool, List[SubEpochSummary], List[BlockRecord]]:
+ ) -> List[BlockRecord]:
+ start_time = time.time()
summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self._constants, weight_proof)
await asyncio.sleep(0) # break up otherwise multi-second sync code
if summaries is None or sub_epoch_weight_list is None:
- log.error("weight proof failed sub epoch data validation")
- return False, [], []
+ raise ValueError("weight proof failed sub epoch data validation")
validate_from = get_fork_ses_idx(old_proof, weight_proof)
- task = asyncio.create_task(
- validate_weight_proof_inner(
- self._constants,
- self._executor,
- self._executor_shutdown_tempfile.name,
- self._num_processes,
- weight_proof,
- summaries,
- sub_epoch_weight_list,
- skip_segment_validation,
- validate_from,
- )
+ valid, block_records = await validate_weight_proof_inner(
+ self._constants,
+ self._executor,
+ self._executor_shutdown_tempfile.name,
+ self._num_processes,
+ weight_proof,
+ summaries,
+ sub_epoch_weight_list,
+ skip_segment_validation,
+ validate_from,
)
- self._weight_proof_tasks.append(task)
- valid, block_records = await task
- self._weight_proof_tasks.remove(task)
- return valid, summaries, block_records
+ if not valid:
+ raise ValueError("weight proof validation failed")
+ log.info(f"It took {time.time() - start_time} time to validate the weight proof {weight_proof.get_hash()}")
+ return block_records
def get_wp_fork_point(constants: ConsensusConstants, old_wp: Optional[WeightProof], new_wp: WeightProof) -> uint32:
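Because validate_weight_proof now raises ValueError on failure and returns only the block records, callers switch from checking a boolean flag to catching an exception. A hedged sketch of that caller-side shape; this is not a quote of the real wallet_node code, and handler, weight_proof and old_proof are assumed parameters:

import logging

log = logging.getLogger(__name__)

async def sync_from_weight_proof(handler, weight_proof, old_proof=None):
    # Sketch only: validation failures now surface as ValueError instead of a
    # (False, [], []) return value; on success only block records come back.
    try:
        return await handler.validate_weight_proof(weight_proof, old_proof=old_proof)
    except ValueError as exc:
        log.warning("weight proof rejected: %s", exc)
        return None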
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -8,22 +8,23 @@
dependencies = [
"aiofiles==23.1.0", # Async IO for files
"anyio==3.6.2",
+ "boto3==1.26.131", # AWS S3 for DL s3 plugin
"blspy==1.0.16", # Signature library
"chiavdf==1.0.8", # timelord and vdf verification
"chiabip158==1.2", # bip158-style wallet filters
"chiapos==1.0.12b13", # proof of space
"clvm==0.9.7",
"clvm_tools==0.4.6", # Currying, Program.to, other conveniences
- "chia_rs==0.2.5",
+ "chia_rs==0.2.7",
"clvm-tools-rs==0.1.30", # Rust implementation of clvm_tools' compiler
"aiohttp==3.8.4", # HTTP server for full node rpc
- "aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks
- "bitstring==4.0.1", # Binary data management library
+ "aiosqlite==0.19.0", # asyncio wrapper for sqlite, to store blocks
+ "bitstring==4.0.2", # Binary data management library
"colorama==0.4.6", # Colorizes terminal output
"colorlog==6.7.0", # Adds color to logs
- "concurrent-log-handler==0.9.20", # Concurrently log and rotate logs
+ "concurrent-log-handler==0.9.23", # Concurrently log and rotate logs
"cryptography==39.0.1", # Python cryptography library for TLS - keyring conflict
- "filelock==3.9.0", # For reading and writing config multiprocess and multithread safely (non-reentrant locks)
+ "filelock==3.12.0", # For reading and writing config multiprocess and multithread safely (non-reentrant locks)
"keyring==23.13.1", # Store keys in MacOS Keychain, Windows Credential Locker
"PyYAML==6.0", # Used for config file format
"setproctitle==1.3.2", # Gives the chia processes readable names
@@ -33,8 +34,8 @@
"watchdog==2.2.0", # Filesystem event watching - watches keyring.yaml
"dnslib==0.9.23", # dns lib
"typing-extensions==4.5.0", # typing backports like Protocol and TypedDict
- "zstd==1.5.4.0",
- "packaging==23.0",
+ "zstd==1.5.5.1",
+ "packaging==23.1",
"psutil==5.9.4",
]
@@ -44,7 +45,8 @@
dev_dependencies = [
"build",
- "coverage",
+ # >=7.2.4 for https://github.com/nedbat/coveragepy/issues/1604
+ "coverage>=7.2.4",
"diff-cover",
"pre-commit",
"py3createtorrent",
@@ -58,10 +60,10 @@
"isort",
"flake8",
"mypy",
- "black==22.10.0",
+ "black==23.3.0",
"aiohttp_cors", # For blackd
"ipython", # For asyncio debugging
- "pyinstaller==5.8.0",
+ "pyinstaller==5.10.1",
"types-aiofiles",
"types-cryptography",
"types-pkg_resources",
@@ -119,9 +121,11 @@
"chia.wallet.cat_wallet",
"chia.wallet.did_wallet",
"chia.wallet.nft_wallet",
- "chia.wallet.settings",
"chia.wallet.trading",
"chia.wallet.util",
+ "chia.wallet.vc_wallet",
+ "chia.wallet.vc_wallet.vc_puzzles",
+ "chia.wallet.vc_wallet.cr_puzzles",
"chia.ssl",
"mozilla-ca",
],
@@ -141,11 +145,12 @@
"chia_full_node_simulator = chia.simulator.start_simulator:main",
"chia_data_layer = chia.server.start_data_layer:main",
"chia_data_layer_http = chia.data_layer.data_layer_server:main",
+ "chia_data_layer_s3_plugin = chia.data_layer.s3_plugin_service:run_server",
]
},
package_data={
"chia": ["pyinstaller.spec"],
- "": ["*.clvm", "*.clvm.hex", "*.clib", "*.clinc", "*.clsp", "py.typed"],
+ "": ["*.clsp", "*.clsp.hex", "*.clvm", "*.clib", "py.typed"],
"chia.util": ["initial-*.yaml", "english.txt"],
"chia.ssl": ["chia_ca.crt", "chia_ca.key", "dst_root_ca.pem"],
"mozilla-ca": ["cacert.pem"],
diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py
--- a/tools/analyze-chain.py
+++ b/tools/analyze-chain.py
@@ -7,44 +7,34 @@
from functools import partial
from pathlib import Path
from time import time
-from typing import Callable, List, Optional, Union
+from typing import Callable, List, Optional, Tuple, Union
import click
import zstd
from blspy import AugSchemeMPL, G1Element
-from chia_rs import MEMPOOL_MODE, run_generator
+from chia_rs import MEMPOOL_MODE, SpendBundleConditions, run_block_generator
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.types.block_protocol import BlockInfo
-from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32, bytes48
from chia.types.full_block import FullBlock
from chia.util.condition_tools import pkm_pairs
from chia.util.full_block_utils import block_info_from_block, generator_from_block
-from chia.wallet.puzzles.rom_bootstrap_generator import get_generator
-GENERATOR_ROM = bytes(get_generator())
-
-# returns an optional error code and an optional PySpendBundleConditions (from chia_rs)
+# returns an optional error code and an optional SpendBundleConditions (from chia_rs)
# exactly one of those will hold a value and the number of seconds it took to
# run
-def run_gen(env_data: bytes, block_program_args: bytes, flags: int):
- max_cost = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
- cost_per_byte = DEFAULT_CONSTANTS.COST_PER_BYTE
-
- # we don't charge for the size of the generator ROM. However, we do charge
- # cost for the operations it executes
- max_cost -= len(env_data) * cost_per_byte
-
- env_data = b"\xff" + env_data + b"\xff" + block_program_args + b"\x80"
-
+def run_gen(
+ generator_program: SerializedProgram, block_program_args: List[bytes], flags: int
+) -> Tuple[Optional[int], Optional[SpendBundleConditions], float]:
try:
start_time = time()
- err, result = run_generator(
- GENERATOR_ROM,
- env_data,
- max_cost,
+ err, result = run_block_generator(
+ bytes(generator_program),
+ block_program_args,
+ DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
flags,
)
run_time = time() - start_time
@@ -131,17 +121,12 @@ def default_call(
num_refs = len(generator_blobs)
# add the block program arguments
- block_program_args = bytearray(b"\xff")
- for ref_block_blob in generator_blobs:
- block_program_args += b"\xff"
- block_program_args += Program.to(ref_block_blob).as_bin()
- block_program_args += b"\x80\x80"
-
assert block.transactions_generator is not None
- err, result, run_time = run_gen(bytes(block.transactions_generator), bytes(block_program_args), flags)
+ err, result, run_time = run_gen(block.transactions_generator, generator_blobs, flags)
if err is not None:
sys.stderr.write(f"ERROR: {hh.hex()} {height} {err}\n")
return
+ assert result is not None
num_removals = len(result.spends)
fees = result.reserve_fee
@@ -155,7 +140,7 @@ def default_call(
# create hash_key list for aggsig check
pairs_pks: List[bytes48] = []
pairs_msgs: List[bytes] = []
- pairs_pks, pairs_msgs = pkm_pairs(result, DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA, soft_fork=False)
+ pairs_pks, pairs_msgs = pkm_pairs(result, DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA)
pairs_g1s = [G1Element.from_bytes(x) for x in pairs_pks]
assert block.transactions_info is not None
assert block.transactions_info.aggregated_signature is not None
diff --git a/tools/chialispp.py b/tools/chialispp.py
new file mode 100644
--- /dev/null
+++ b/tools/chialispp.py
@@ -0,0 +1,356 @@
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+
+# A simple class for separating a line into code and comment
+class Line:
+ def __init__(self, code: List[bytes], comment: Optional[List[bytes]]):
+ self.code = code
+ self.comment = comment
+
+
+# Remove all whitespace from the beginning of a byte array
+def trim_ascii_start(line: List[bytes]) -> List[bytes]:
+ first_non_ws: int = 0
+ got_one: bool = False
+
+ for i, ch in enumerate(line):
+ if not (ch.decode("ascii").isspace()):
+ got_one = True
+ first_non_ws = i
+ break
+
+ if not got_one:
+ return []
+ else:
+ return line[first_non_ws:]
+
+
+# Remove all whitespace from the end of a byte array
+def trim_ascii_end(line: List[bytes]) -> List[bytes]:
+ last_non_ws: int = 0
+ got_one: bool = False
+
+ for i, ch in enumerate(line):
+ if (not ch.decode("ascii").isspace()) and ch[0] <= 127:
+ got_one = True
+ last_non_ws = i
+
+ if not got_one:
+ return []
+ else:
+ return line[0 : last_non_ws + 1]
+
+
+class Formatter:
+ def __init__(self) -> None:
+ self.start_paren_level: int = 0
+ self.paren_level: int = 0
+        self.out_col: int = 0  # The column we are at while outputting a line
+ self.cur_line: int = 0
+ self.line: List[bytes] = []
+ self.comment: Optional[List[bytes]] = None
+ self.lines: List[List[bytes]] = []
+ self.work_lines: List[Line] = []
+ self.getting_form_name: int = 0
+ self.got_form_on_line: int = 0
+ self.form_name: List[bytes] = []
+ self.reset_form_indent: bool = False
+ # self.def_started = False
+ self.result_line: List[bytes] = []
+ # self.definition_starts = []
+ # self.extra_def_lines = []
+ self.indent_stack: List[int] = []
+ self.result: List[List[bytes]] = []
+ self.config: Dict[str, Any] = {
+ "gnu_comment_conventions": False,
+ }
+
+ # Add a character of source, breaking the source into lines as we go
+ def run_char(self, ch: bytes) -> None:
+ if ch == b"\n":
+ self.finish_line()
+ else:
+ self.line.append(ch)
+
+ # Process a single character and add it to the final result
+ def output_char(self, ch: bytes) -> None:
+ if ch == b"\n":
+ self.work_lines.append(Line(self.result_line, self.comment))
+ self.result_line = []
+ self.comment = None
+ self.out_col = 0
+ else:
+ self.result_line.append(ch)
+ self.out_col += 1
+
+ # Process a line and add it to the work_lines array
+ def output_line(self) -> None:
+ line_indent = self.get_cur_indent()
+ max_paren_level = self.paren_level
+ self.start_paren_level = self.paren_level
+ starting_indent_len = len(self.indent_stack)
+
+ if not self.line:
+ self.output_char(b"\n")
+ return
+
+ # Get a line from the unprocessed lines
+ line = trim_ascii_end(self.line)
+ line = trim_ascii_start(line)
+ self.line.clear()
+
+        # Some variables to track whether or not we're in a string literal
+ in_string = None
+ string_bs = False # bs == backslash
+
+        # Some variables to track whether or not we're in a comment
+        semis = 0  # number of semicolons starting a comment
+ semi_off = 0 # the column where the comment starts
+ comment = [] # The comment byte array
+
+ # Main loop to format the line
+ for i, ch in enumerate(line):
+ # Track the form name
+ if self.getting_form_name > 0:
+ self.reset_form_indent = False
+ if self.getting_form_name == 1 and not (ch == b" "):
+ self.getting_form_name = 2
+ self.form_name.append(ch)
+ elif self.getting_form_name == 2 and ch in (b" ", b"(", b")"):
+ self.getting_form_name = 0
+ self.got_form_on_line = self.cur_line
+ else:
+ self.form_name.append(ch)
+
+ # if self.start_paren_level == 1 and not self.def_started:
+ # self.def_started = True
+ # self.definition_starts.append(len(self.work_lines))
+
+ # Special indentation rules for `if`
+ should_reset_indent = (
+ self.getting_form_name == 0
+ and self.form_name == [b"i", b"f"]
+ and not (ch == b" ")
+ and not self.reset_form_indent
+ )
+
+ # Be sure to not format string literals as code
+ if string_bs:
+ string_bs = False
+ continue
+ if in_string is not None:
+ if ch == b"\\":
+ string_bs = True
+ if ch == in_string:
+ in_string = None
+ continue
+
+ if semis == 0:
+ # We've entered a string, stop processing
+ if ch == b"'" or ch == b'"':
+ in_string = ch
+ continue
+ elif ch == b"(":
+ self.paren_level += 1
+ if self.paren_level > max_paren_level:
+ max_paren_level = self.paren_level
+
+ if should_reset_indent:
+ self.reset_indent(line_indent + i)
+ self.reset_form_indent = True
+ self.indent_paren()
+
+ self.form_name.clear()
+ self.got_form_on_line = 0
+ self.getting_form_name = 1
+ continue
+ elif ch == b")":
+ indentation_diff: int = (self.indent_stack[-1] if len(self.indent_stack) > 0 else 0) - (
+ self.indent_stack[-2] if len(self.indent_stack) > 1 else 0
+ )
+ self.retire_indent()
+ if self.paren_level <= self.start_paren_level:
+ line_indent -= indentation_diff
+ self.paren_level -= 1
+ continue
+ elif should_reset_indent:
+ self.reset_indent(line_indent + i)
+ self.reset_form_indent = True
+
+ if ch == b";":
+ if semis == 0:
+ semi_off = i
+ semis += 1
+ elif semis > 0:
+ comment = line[i:]
+ line = trim_ascii_end(line[:semi_off])
+ break
+
+ if semis + semi_off == len(line):
+ line = trim_ascii_end(line[:semi_off])
+
+ line = trim_ascii_end(line)
+
+ if semis == 1 and not line and self.config["gnu_comment_conventions"]:
+ semis = 0
+ self.comment = comment
+ comment = []
+ else:
+ self.comment = None
+
+ if semis > 0:
+ if semis < 3 or not self.config["gnu_comment_conventions"]:
+ self.indent(line_indent)
+ if line and not self.config["gnu_comment_conventions"]:
+ for co in line:
+ self.output_char(co)
+ self.output_char(b" ")
+ self.output_char(b" ")
+ for _i in range(semis):
+ self.output_char(b";")
+ for co in comment:
+ self.output_char(co)
+ if line and self.config["gnu_comment_conventions"]:
+ # Code after comment in this scenario
+ self.output_char(b"\n")
+ self.indent(line_indent)
+ for co in line:
+ self.output_char(co)
+ elif line != []:
+ self.indent(line_indent)
+ for co in line:
+ self.output_char(co)
+
+ self.output_char(b"\n")
+
+ # We never want the next line to be more indented than us + 1 unit
+ if len(self.indent_stack) > starting_indent_len + 1:
+ for i in range(starting_indent_len + 1, len(self.indent_stack)):
+ self.indent_stack[i] = self.indent_stack[starting_indent_len + 1]
+
+ # if max_paren_level > 1 and self.paren_level == 1:
+ # self.def_started = False
+ # self.extra_def_lines.append(len(self.work_lines))
+
+ # Add our current line to our lines array and reset the line
+ def finish_line(self) -> None:
+ self.lines.append(self.line.copy())
+ self.line.clear()
+ self.comment = None
+
+ def finish(self) -> None:
+ if self.line:
+ self.finish_line()
+
+ for i in range(len(self.lines)):
+ self.line = self.lines[i]
+ self.cur_line = i
+ self.output_line()
+
+ next_handle_line = 0
+ for i in range(len(self.work_lines)):
+ if i < next_handle_line:
+ continue
+
+ # Find the max comment spacing needed and output the group.
+ # Skip if already handled.
+ comment = self.work_lines[i].comment
+ if comment is not None:
+ comment_offset = len(self.work_lines[i].code)
+ comments = [comment]
+ for j in range(i + 1, len(self.lines)):
+ comment = self.work_lines[j].comment
+ if comment is not None:
+ comments.append(comment)
+ comment_offset = max(comment_offset, len(self.work_lines[j].code))
+ else:
+ next_handle_line = j
+ break
+
+ for j, comment in enumerate(comments):
+ line = self.work_lines[i + j].code.copy()
+ while len(line) < comment_offset:
+ line.append(b" ")
+ line.append(b" ")
+ line.append(b";")
+ line += comment[:]
+ self.result.append(line)
+ else:
+ self.result.append(self.work_lines[i].code.copy())
+
+ # el_idx = 0
+ # inserted = 0
+ #
+ # for ds in self.definition_starts[0:len(self.definition_starts)]:
+ # while el_idx < len(self.extra_def_lines) and self.extra_def_lines[el_idx] < ds:
+ # el_idx += 1
+ #
+ # if el_idx >= len(self.extra_def_lines):
+ # break
+ #
+ # el = self.extra_def_lines[el_idx]
+ # if el <= ds + 1:
+ # insert_at = el + inserted
+ # self.result.insert(insert_at, [])
+ # inserted += 1
+
+ # We maintain a stack of indentation levels
+ # The following functions maintain that stack
+ def indent(self, cur_indent: int) -> None:
+ while self.out_col < cur_indent:
+ self.output_char(b" ")
+
+ def get_cur_indent(self) -> int:
+ if self.indent_stack:
+ return self.indent_stack[-1]
+ else:
+ return 0
+
+ def reset_indent(self, i: int) -> None:
+ if self.indent_stack:
+ self.indent_stack[-1] = i
+
+ def indent_paren(self) -> None:
+ current_indent = self.indent_stack[-1] if self.indent_stack else 0
+ self.indent_stack.append(current_indent + 2)
+
+ def retire_indent(self) -> None:
+ if self.indent_stack:
+ self.indent_stack.pop()
+
+
+def concat_byte_array(bs: List[bytes]) -> bytes:
+ return b"".join(bs)
+
+
+def main() -> None:
+ for arg in sys.argv[1:]:
+ path = Path(arg)
+ if path.is_dir():
+ all_paths = [*path.rglob("*.clsp"), *path.rglob("*.clib")]
+ else:
+ all_paths = [path]
+
+ for filename in all_paths:
+ with open(filename, "rb") as f:
+ filedata = f.read()
+
+ formatter = Formatter()
+
+ for ch in filedata:
+ formatter.run_char(bytes([ch]))
+
+ formatter.finish()
+
+ with open(filename, "wb") as f:
+ for i, line in enumerate(formatter.result):
+ f.write(concat_byte_array(line))
+ f.write(b"\n")
+
+
+if __name__ == "__main__":
+ main()
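A short usage sketch of the Formatter defined above on an in-memory snippet, mirroring what main() does for each .clsp/.clib file; the chialisp source shown is made up for illustration:

# Sketch only: format a small in-memory chialisp snippet.
source = b"(mod (X)\n  ; double the argument\n  (* X 2)\n)\n"

formatter = Formatter()
for ch in source:
    formatter.run_char(bytes([ch]))
formatter.finish()

formatted = b"\n".join(concat_byte_array(line) for line in formatter.result) + b"\n"
print(formatted.decode("ascii"))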
diff --git a/tools/generate_chain.py b/tools/generate_chain.py
--- a/tools/generate_chain.py
+++ b/tools/generate_chain.py
@@ -23,6 +23,9 @@
@contextmanager
def enable_profiler(profile: bool, counter: int) -> Iterator[None]:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
if not profile:
yield
return
diff --git a/tools/manage_clvm.py b/tools/manage_clvm.py
--- a/tools/manage_clvm.py
+++ b/tools/manage_clvm.py
@@ -30,19 +30,21 @@
from chia.types.blockchain_format.serialized_program import SerializedProgram # noqa: E402
clvm_suffix = ".clvm"
-hex_suffix = ".clvm.hex"
-hash_suffix = ".clvm.hex.sha256tree"
-all_suffixes = {"clvm": clvm_suffix, "hex": hex_suffix, "hash": hash_suffix}
-# TODO: could be cli options
+clsp_suffix = ".clsp"
+hex_suffix = ".clsp.hex"
+all_suffixes = {"clsp": clsp_suffix, "hex": hex_suffix, "clvm": clvm_suffix}
+# TODO: these could be cli options
top_levels = {"chia"}
+hashes_path = root.joinpath("chia/wallet/puzzles/deployed_puzzle_hashes.json")
+std_libraries = root.joinpath("chia/wallet/puzzles")
class ManageClvmError(Exception):
pass
-class CacheEntry(typing.TypedDict):
- clvm: str
+class CacheEntry(typing_extensions.TypedDict):
+ clsp: str
hex: str
hash: str
@@ -68,7 +70,7 @@ def __init__(self, found_version: object, expected_version: CacheVersion) -> Non
super().__init__(f"Cache has wrong version, expected {expected_version!r} got: {found_version!r}")
-class Cache(typing.TypedDict):
+class Cache(typing_extensions.TypedDict):
entries: CacheEntries
version: CacheVersion
@@ -100,8 +102,7 @@ def dump_cache(cache: Cache, file: typing.IO[str]) -> None:
def generate_hash_bytes(hex_bytes: bytes) -> bytes:
cleaned_blob = bytes.fromhex(hex_bytes.decode("utf-8"))
serialize_program = SerializedProgram.from_bytes(cleaned_blob)
- result = serialize_program.get_tree_hash().hex()
- return (result + "\n").encode("utf-8")
+ return serialize_program.get_tree_hash()
@typing_extensions.final
@@ -109,14 +110,23 @@ def generate_hash_bytes(hex_bytes: bytes) -> bytes:
class ClvmPaths:
clvm: pathlib.Path
hex: pathlib.Path
- hash: pathlib.Path
+ hash: str
+ missing_files: typing.List[str]
@classmethod
- def from_clvm(cls, clvm: pathlib.Path) -> ClvmPaths:
+ def from_clvm(cls, clvm: pathlib.Path, hash_dict: typing.Dict[str, str] = {}) -> ClvmPaths:
+ stem_filename = clvm.name[: -len(clsp_suffix)]
+ hex_path = clvm.with_name(stem_filename + hex_suffix)
+ missing_files = []
+ if not hex_path.exists():
+ missing_files.append(str(hex_path))
+ if stem_filename not in hash_dict:
+ missing_files.append(f"{stem_filename} entry in {hashes_path}")
return cls(
clvm=clvm,
- hex=clvm.with_name(clvm.name[: -len(clvm_suffix)] + hex_suffix),
- hash=clvm.with_name(clvm.name[: -len(clvm_suffix)] + hash_suffix),
+ hex=hex_path,
+ hash=stem_filename,
+ missing_files=missing_files,
)
@@ -127,10 +137,13 @@ class ClvmBytes:
hash: bytes
@classmethod
- def from_clvm_paths(cls, paths: ClvmPaths) -> ClvmBytes:
+ def from_clvm_paths(cls, paths: ClvmPaths, hash_dict: typing.Dict[str, str] = {}) -> ClvmBytes:
+ hex_bytes = paths.hex.read_bytes()
return cls(
- hex=paths.hex.read_bytes(),
- hash=paths.hash.read_bytes(),
+ hex=hex_bytes,
+ hash=bytes.fromhex(hash_dict[paths.hash])
+ if paths.hash in hash_dict
+ else generate_hash_bytes(hex_bytes=hex_bytes),
)
@classmethod
@@ -142,7 +155,7 @@ def from_hex_bytes(cls, hex_bytes: bytes) -> ClvmBytes:
# These files have the wrong extension for now so we'll just manually exclude them
-excludes = {"condition_codes.clvm", "create-lock-puzzlehash.clvm"}
+excludes: typing.Set[str] = set()
def find_stems(
@@ -173,7 +186,7 @@ def create_cache_entry(reference_paths: ClvmPaths, reference_bytes: ClvmBytes) -
hash_hasher.update(reference_bytes.hash)
return {
- "clvm": clvm_hasher.hexdigest(),
+ "clsp": clvm_hasher.hexdigest(),
"hex": hex_hasher.hexdigest(),
"hash": hash_hasher.hexdigest(),
}
@@ -190,6 +203,8 @@ def check(use_cache: bool) -> int:
used_excludes = set()
overall_fail = False
+ HASHES: typing.Dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {}
+
cache: Cache
if not use_cache:
cache = create_empty_cache()
@@ -212,27 +227,43 @@ def check(use_cache: bool) -> int:
cache_modified = False
found_stems = find_stems(top_levels)
- for name in ["hex", "hash"]:
- found = found_stems[name]
- suffix = all_suffixes[name]
- extra = found - found_stems["clvm"]
+ found = found_stems["hex"]
+ suffix = all_suffixes["hex"]
+ extra = found - found_stems["clsp"]
- print()
- print(f"Extra {suffix} files:")
+ print()
+ print(f"Extra {suffix} files:")
- if len(extra) == 0:
- print(" -")
- else:
- overall_fail = True
- for stem in extra:
- print(f" {stem.with_name(stem.name + suffix)}")
+ if len(extra) == 0:
+ print(" -")
+ else:
+ overall_fail = True
+ for stem in extra:
+ print(f" {stem.with_name(stem.name + suffix)}")
print()
- print("Checking that all existing .clvm files compile to .clvm.hex that match existing caches:")
+ print("Checking that no .clvm files begin with `(mod`")
for stem_path in sorted(found_stems["clvm"]):
- clvm_path = stem_path.with_name(stem_path.name + clvm_suffix)
- if clvm_path.name in excludes:
- used_excludes.add(clvm_path.name)
+ with open(stem_path.with_name(stem_path.name + clvm_suffix)) as file:
+ file_lines = file.readlines()
+ for line in file_lines:
+ non_comment: str = line.split(";")[0]
+ if "(" in non_comment:
+ paren_index: int = non_comment.find("(")
+ if len(non_comment) >= paren_index + 4 and non_comment[paren_index : paren_index + 4] == "(mod":
+ overall_fail = True
+ print(f"FAIL : {stem_path.name + clvm_suffix} contains `(mod`")
+ break
+
+ missing_files: typing.List[str] = []
+ all_hash_stems: typing.List[str] = []
+
+ print()
+ print("Checking that all existing .clsp files compile to .clsp.hex that match existing caches:")
+ for stem_path in sorted(found_stems["clsp"]):
+ clsp_path = stem_path.with_name(stem_path.name + clsp_suffix)
+ if clsp_path.name in excludes:
+ used_excludes.add(clsp_path.name)
continue
file_fail = False
@@ -240,8 +271,12 @@ def check(use_cache: bool) -> int:
cache_key = str(stem_path)
try:
- reference_paths = ClvmPaths.from_clvm(clvm=clvm_path)
- reference_bytes = ClvmBytes.from_clvm_paths(paths=reference_paths)
+ reference_paths = ClvmPaths.from_clvm(clvm=clsp_path, hash_dict=HASHES)
+ if reference_paths.missing_files != []:
+ missing_files.extend(reference_paths.missing_files)
+ continue
+ all_hash_stems.append(reference_paths.hash)
+ reference_bytes = ClvmBytes.from_clvm_paths(paths=reference_paths, hash_dict=HASHES)
new_cache_entry = create_cache_entry(reference_paths=reference_paths, reference_bytes=reference_bytes)
existing_cache_entry = cache_entries.get(cache_key)
@@ -250,13 +285,14 @@ def check(use_cache: bool) -> int:
if not cache_hit:
with tempfile.TemporaryDirectory() as temporary_directory:
generated_paths = ClvmPaths.from_clvm(
- clvm=pathlib.Path(temporary_directory).joinpath(f"generated{clvm_suffix}")
+ clvm=pathlib.Path(temporary_directory).joinpath(reference_paths.clvm.name),
+ hash_dict=HASHES,
)
compile_clvm(
input_path=os.fspath(reference_paths.clvm),
output_path=os.fspath(generated_paths.hex),
- search_paths=[os.fspath(reference_paths.clvm.parent)],
+ search_paths=[os.fspath(reference_paths.clvm.parent), str(std_libraries)],
)
generated_bytes = ClvmBytes.from_hex_bytes(hex_bytes=generated_paths.hex.read_bytes())
@@ -273,15 +309,22 @@ def check(use_cache: bool) -> int:
error = traceback.format_exc()
if file_fail:
- print(f"FAIL : {clvm_path}")
+ print(f"FAIL : {clsp_path}")
if error is not None:
print(error)
else:
- print(f" pass: {clvm_path}")
+ print(f" pass: {clsp_path}")
if file_fail:
overall_fail = True
+ if missing_files != []:
+ overall_fail = True
+ print()
+ print("Missing files (run tools/manage_clvm.py build to build them):")
+ for filename in missing_files:
+ print(f" - {filename}")
+
unused_excludes = sorted(excludes - used_excludes)
if len(unused_excludes) > 0:
overall_fail = True
@@ -291,60 +334,89 @@ def check(use_cache: bool) -> int:
for exclude in unused_excludes:
print(f" {exclude}")
+ extra_hashes = HASHES.keys() - all_hash_stems
+ if len(extra_hashes) != 0:
+ overall_fail = True
+ print()
+ print("Hashes without corresponding files:")
+ for extra_hash in extra_hashes:
+ print(f" {extra_hash}")
+
if use_cache and cache_modified:
cache_path.parent.mkdir(parents=True, exist_ok=True)
with cache_path.open(mode="w") as file:
dump_cache(cache=cache, file=file)
- return 1 if overall_fail else 0
+ sys.exit(1 if overall_fail else 0)
@main.command()
def build() -> int:
overall_fail = False
- found_stems = find_stems(top_levels, suffixes={"clvm": clvm_suffix})
+ HASHES: typing.Dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {}
- print(f"Building all existing {clvm_suffix} files to {hex_suffix}:")
- for stem_path in sorted(found_stems["clvm"]):
- clvm_path = stem_path.with_name(stem_path.name + clvm_suffix)
- if clvm_path.name in excludes:
+ found_stems = find_stems(top_levels, suffixes={"clsp": clsp_suffix})
+ hash_stems = []
+ new_hashes = HASHES.copy()
+
+ print(f"Building all existing {clsp_suffix} files to {hex_suffix}:")
+ for stem_path in sorted(found_stems["clsp"]):
+ clsp_path = stem_path.with_name(stem_path.name + clsp_suffix)
+ if clsp_path.name in excludes:
continue
file_fail = False
error = None
try:
- reference_paths = ClvmPaths.from_clvm(clvm=clvm_path)
+ reference_paths = ClvmPaths.from_clvm(clvm=clsp_path, hash_dict=HASHES)
with tempfile.TemporaryDirectory() as temporary_directory:
generated_paths = ClvmPaths.from_clvm(
- clvm=pathlib.Path(temporary_directory).joinpath(f"generated{clvm_suffix}")
+ clvm=pathlib.Path(temporary_directory).joinpath(reference_paths.clvm.name),
+ hash_dict=HASHES,
)
compile_clvm(
input_path=os.fspath(reference_paths.clvm),
output_path=os.fspath(generated_paths.hex),
- search_paths=[os.fspath(reference_paths.clvm.parent)],
+ search_paths=[os.fspath(reference_paths.clvm.parent), str(std_libraries)],
)
generated_bytes = ClvmBytes.from_hex_bytes(hex_bytes=generated_paths.hex.read_bytes())
reference_paths.hex.write_bytes(generated_bytes.hex)
+
+ # Only add hashes to json file if they didn't already exist in it
+ hash_stems.append(reference_paths.hash)
+ if reference_paths.hash not in new_hashes:
+ new_hashes[reference_paths.hash] = ClvmBytes.from_clvm_paths(
+ reference_paths, hash_dict=HASHES
+ ).hash.hex()
except Exception:
file_fail = True
error = traceback.format_exc()
if file_fail:
- print(f"FAIL : {clvm_path}")
+ print(f"FAIL : {clsp_path}")
if error is not None:
print(error)
else:
- print(f" built: {clvm_path}")
+ print(f" built: {clsp_path}")
if file_fail:
overall_fail = True
- return 1 if overall_fail else 0
+ hashes_path.write_text(
+ json.dumps(
+ {key: value for key, value in new_hashes.items() if key in hash_stems}, # filter out not found files
+ indent=4,
+ sort_keys=True,
+ )
+ + "\n"
+ )
+
+ sys.exit(1 if overall_fail else 0)
-sys.exit(main(auto_envvar_prefix="CHIA_MANAGE_CLVM"))
+main(auto_envvar_prefix="CHIA_MANAGE_CLVM")
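For orientation, a sketch of the payload shape that build() writes to deployed_puzzle_hashes.json; the stem name and hash below are placeholders, not real deployed values:

# Illustrative only: the json written is a sorted mapping of .clsp stem -> tree hash hex.
import json

new_hashes = {
    "example_puzzle": "ab" * 32,  # placeholder sha256 tree hash, hex encoded
}
print(json.dumps(new_hashes, indent=4, sort_keys=True) + "\n")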
diff --git a/tools/run_block.py b/tools/run_block.py
--- a/tools/run_block.py
+++ b/tools/run_block.py
@@ -61,7 +61,7 @@
from chia.wallet.uncurried_puzzle import uncurry_puzzle
DESERIALIZE_MOD = load_serialized_clvm_maybe_recompile(
- "chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles"
+ "chialisp_deserialisation.clsp", package_or_requirement="chia.wallet.puzzles"
)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -131,23 +131,34 @@ jobs:
- name: Coverage Processing
run: |
- coverage combine --rcfile=.coveragerc --data-file coverage-reports/.coverage coverage-data/
- coverage xml --rcfile=.coveragerc --data-file coverage-reports/.coverage -o coverage-reports/coverage.xml
- coverage html --rcfile=.coveragerc --data-file coverage-reports/.coverage --directory coverage-reports/html/
+ coverage combine --rcfile=.coveragerc --data-file=coverage-reports/.coverage coverage-data/
+ coverage xml --rcfile=.coveragerc --data-file=coverage-reports/.coverage -o coverage-reports/coverage.xml
+ coverage html --rcfile=.coveragerc --data-file=coverage-reports/.coverage --directory coverage-reports/html/
+
+ - uses: coverallsapp/github-action@v2
+ if: always()
+ env:
+ COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
- name: Coverage report (chia/)
+ if: always()
run: |
- coverage report --rcfile=.coveragerc --data-file coverage-reports/.coverage --include='chia/*' --show-missing | tee coverage-reports/coverage-chia-stdout
+ set -o pipefail
+ coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/**/*' --show-missing | tee coverage-reports/coverage-chia-stdout
- name: Coverage report (tests/)
+ if: always()
run: |
- coverage report --rcfile=.coveragerc --data-file coverage-reports/.coverage --include='tests/*' --show-missing | tee coverage-reports/coverage-tests-stdout
+ set -o pipefail
+ coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='tests/**/*' --show-missing | tee coverage-reports/coverage-tests-stdout
- name: Coverage report (diff)
+ if: always()
env:
compare-branch: ${{ github.base_ref == '' && github.event.before || format('origin/{0}', github.base_ref) }}
run: |
- diff-cover --compare-branch=${{ env.compare-branch }} --html-report coverage-reports/diff-cover.html --markdown-report coverage-reports/diff-cover.md coverage-reports/coverage.xml | tee coverage-reports/diff-cover-stdout
+ set -o pipefail
+ diff-cover --compare-branch=${{ env.compare-branch }} --fail-under=100 --html-report=coverage-reports/diff-cover.html --markdown-report=coverage-reports/diff-cover.md coverage-reports/coverage.xml | tee coverage-reports/diff-cover-stdout
cat coverage-reports/diff-cover.md >> $GITHUB_STEP_SUMMARY
- name: Publish coverage reports
diff --git a/chia/wallet/puzzles/test_generator_deserialize.clvm b/chia/wallet/puzzles/test_generator_deserialize.clsp
similarity index 100%
rename from chia/wallet/puzzles/test_generator_deserialize.clvm
rename to chia/wallet/puzzles/test_generator_deserialize.clsp
diff --git a/chia/wallet/puzzles/test_generator_deserialize.clvm.hex b/chia/wallet/puzzles/test_generator_deserialize.clsp.hex
similarity index 100%
rename from chia/wallet/puzzles/test_generator_deserialize.clvm.hex
rename to chia/wallet/puzzles/test_generator_deserialize.clsp.hex
diff --git a/chia/wallet/puzzles/test_generator_deserialize.clvm.hex.sha256tree b/chia/wallet/puzzles/test_generator_deserialize.clvm.hex.sha256tree
deleted file mode 100644
--- a/chia/wallet/puzzles/test_generator_deserialize.clvm.hex.sha256tree
+++ /dev/null
@@ -1 +0,0 @@
-52add794fc76e89512e4a063c383418bda084c8a78c74055abe80179e4a7832c
diff --git a/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm b/chia/wallet/puzzles/test_multiple_generator_input_arguments.clsp
similarity index 71%
rename from chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm
rename to chia/wallet/puzzles/test_multiple_generator_input_arguments.clsp
--- a/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm
+++ b/chia/wallet/puzzles/test_multiple_generator_input_arguments.clsp
@@ -3,9 +3,9 @@
(defun decompress_cses (decompress_puzzle decompress_coin_spend_entry cses deserialize puzzle_prefix)
(if cses
- (c (a decompress_coin_spend_entry (list deserialize decompress_puzzle puzzle_prefix (f cses)))
- (decompress_cses decompress_puzzle decompress_coin_spend_entry (r cses) deserialize puzzle_prefix ))
- ()) )
+ (c (a decompress_coin_spend_entry (list deserialize decompress_puzzle puzzle_prefix (f cses)))
+ (decompress_cses decompress_puzzle decompress_coin_spend_entry (r cses) deserialize puzzle_prefix ))
+ ()) )
(defun join_gen_args (generators start1 end1 start2 end2)
(concat
diff --git a/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm.hex b/chia/wallet/puzzles/test_multiple_generator_input_arguments.clsp.hex
similarity index 100%
rename from chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm.hex
rename to chia/wallet/puzzles/test_multiple_generator_input_arguments.clsp.hex
diff --git a/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm.hex.sha256tree b/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm.hex.sha256tree
deleted file mode 100644
--- a/chia/wallet/puzzles/test_multiple_generator_input_arguments.clvm.hex.sha256tree
+++ /dev/null
@@ -1 +0,0 @@
-156dafbddc3e1d3bfe1f2a84e48e5e46b287b8358bf65c3c091c93e855fbfc5b
diff --git a/tests/blockchain/blockchain_test_utils.py b/tests/blockchain/blockchain_test_utils.py
--- a/tests/blockchain/blockchain_test_utils.py
+++ b/tests/blockchain/blockchain_test_utils.py
@@ -1,10 +1,12 @@
-from typing import Optional, List
+from __future__ import annotations
-from chia.consensus.blockchain import Blockchain, AddBlockResult
+from typing import List, Optional
+
+from chia.consensus.blockchain import AddBlockResult, Blockchain
from chia.consensus.multiprocess_validation import PreValidationResult
from chia.types.full_block import FullBlock
from chia.util.errors import Err
-from chia.util.ints import uint64, uint32
+from chia.util.ints import uint32, uint64
async def check_block_store_invariant(bc: Blockchain):
diff --git a/tests/blockchain/test_blockchain.py b/tests/blockchain/test_blockchain.py
--- a/tests/blockchain/test_blockchain.py
+++ b/tests/blockchain/test_blockchain.py
@@ -1486,55 +1486,81 @@ async def test_bad_filter_hash(self, empty_blockchain, bt):
await _validate_and_add_block(empty_blockchain, blocks[-1])
@pytest.mark.asyncio
- async def test_bad_timestamp(self, empty_blockchain, bt):
+ @pytest.mark.parametrize("with_softfork2", [False, True])
+ async def test_bad_timestamp(self, bt, with_softfork2):
# 26
+ if with_softfork2:
+ # enable softfork2 at height 0, to make it apply to this test
+ # the test constants set MAX_FUTURE_TIME to 10 days, restore it to
+ # default for this test
+ constants = test_constants.replace(SOFT_FORK2_HEIGHT=0, MAX_FUTURE_TIME=5 * 60)
+ time_delta = 2 * 60
+ else:
+ constants = test_constants.replace(MAX_FUTURE_TIME=5 * 60)
+ time_delta = 5 * 60
+
blocks = bt.get_consecutive_blocks(1)
- await _validate_and_add_block(empty_blockchain, blocks[0])
- while True:
- blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
- if blocks[-1].foliage_transaction_block is not None:
- block_bad: FullBlock = recursive_replace(
- blocks[-1],
- "foliage_transaction_block.timestamp",
- blocks[0].foliage_transaction_block.timestamp - 10,
- )
- block_bad: FullBlock = recursive_replace(
- block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
- )
- new_m = block_bad.foliage.foliage_transaction_block_hash
- new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
- block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
- await _validate_and_add_block(empty_blockchain, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_PAST)
- block_bad: FullBlock = recursive_replace(
- blocks[-1],
- "foliage_transaction_block.timestamp",
- blocks[0].foliage_transaction_block.timestamp,
- )
- block_bad: FullBlock = recursive_replace(
- block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
- )
- new_m = block_bad.foliage.foliage_transaction_block_hash
- new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
- block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
- await _validate_and_add_block(empty_blockchain, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_PAST)
+ async with make_empty_blockchain(constants) as b:
+ await _validate_and_add_block(b, blocks[0])
+ while True:
+ blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
+ if blocks[-1].foliage_transaction_block is not None:
+ block_bad: FullBlock = recursive_replace(
+ blocks[-1],
+ "foliage_transaction_block.timestamp",
+ blocks[0].foliage_transaction_block.timestamp - 10,
+ )
+ block_bad: FullBlock = recursive_replace(
+ block_bad,
+ "foliage.foliage_transaction_block_hash",
+ block_bad.foliage_transaction_block.get_hash(),
+ )
+ new_m = block_bad.foliage.foliage_transaction_block_hash
+ new_fbh_sig = bt.get_plot_signature(
+ new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key
+ )
+ block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
+ await _validate_and_add_block(b, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_PAST)
- block_bad: FullBlock = recursive_replace(
- blocks[-1],
- "foliage_transaction_block.timestamp",
- blocks[0].foliage_transaction_block.timestamp + 10000000,
- )
- block_bad: FullBlock = recursive_replace(
- block_bad, "foliage.foliage_transaction_block_hash", block_bad.foliage_transaction_block.get_hash()
- )
- new_m = block_bad.foliage.foliage_transaction_block_hash
- new_fbh_sig = bt.get_plot_signature(new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key)
- block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
- await _validate_and_add_block(
- empty_blockchain, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_FUTURE
- )
- return None
- await _validate_and_add_block(empty_blockchain, blocks[-1])
+ block_bad: FullBlock = recursive_replace(
+ blocks[-1],
+ "foliage_transaction_block.timestamp",
+ blocks[0].foliage_transaction_block.timestamp,
+ )
+ block_bad: FullBlock = recursive_replace(
+ block_bad,
+ "foliage.foliage_transaction_block_hash",
+ block_bad.foliage_transaction_block.get_hash(),
+ )
+ new_m = block_bad.foliage.foliage_transaction_block_hash
+ new_fbh_sig = bt.get_plot_signature(
+ new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key
+ )
+ block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
+ await _validate_and_add_block(b, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_PAST)
+
+                    # since tests can sometimes run slowly, and since we're using
+ # the system clock, add some extra slack
+ slack = 5
+ block_bad: FullBlock = recursive_replace(
+ blocks[-1],
+ "foliage_transaction_block.timestamp",
+ blocks[0].foliage_transaction_block.timestamp + time_delta + slack,
+ )
+ block_bad: FullBlock = recursive_replace(
+ block_bad,
+ "foliage.foliage_transaction_block_hash",
+ block_bad.foliage_transaction_block.get_hash(),
+ )
+ new_m = block_bad.foliage.foliage_transaction_block_hash
+ new_fbh_sig = bt.get_plot_signature(
+ new_m, blocks[-1].reward_chain_block.proof_of_space.plot_public_key
+ )
+ block_bad = recursive_replace(block_bad, "foliage.foliage_transaction_block_signature", new_fbh_sig)
+ await _validate_and_add_block(b, block_bad, expected_error=Err.TIMESTAMP_TOO_FAR_IN_FUTURE)
+ return None
+ await _validate_and_add_block(b, blocks[-1])
@pytest.mark.asyncio
async def test_height(self, empty_blockchain, bt):
@@ -1927,7 +1953,6 @@ async def test_timelock_conditions(self, opcode, lock_value, expected, with_soft
expected = AddBlockResult.NEW_PEAK
async with make_empty_blockchain(constants) as b:
-
blocks = bt.get_consecutive_blocks(
3,
guarantee_transaction_block=True,
@@ -2130,7 +2155,6 @@ async def test_ephemeral_timelock(self, opcode, lock_value, expected, with_garba
expected = rbr.NEW_PEAK
async with make_empty_blockchain(constants) as b:
-
blocks = bt.get_consecutive_blocks(
3,
guarantee_transaction_block=True,
diff --git a/tests/blockchain/test_blockchain_transactions.py b/tests/blockchain/test_blockchain_transactions.py
--- a/tests/blockchain/test_blockchain_transactions.py
+++ b/tests/blockchain/test_blockchain_transactions.py
@@ -1,23 +1,27 @@
from __future__ import annotations
import logging
+from typing import Tuple
import pytest
from clvm.casts import int_to_bytes
+from chia.full_node.full_node_api import FullNodeAPI
from chia.protocols import wallet_protocol
-from chia.simulator.block_tools import test_constants
+from chia.server.server import ChiaServer
+from chia.simulator.block_tools import BlockTools, test_constants
from chia.simulator.wallet_tools import WalletTool
from chia.types.announcement import Announcement
+from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
from chia.types.spend_bundle import SpendBundle
from chia.util.errors import ConsensusError, Err
-from chia.util.ints import uint64
+from chia.util.ints import uint32, uint64
from tests.blockchain.blockchain_test_utils import _validate_and_add_block
from tests.util.generator_tools_testing import run_and_get_removals_and_additions
-BURN_PUZZLE_HASH = b"0" * 32
+BURN_PUZZLE_HASH = bytes32(b"0" * 32)
WALLET_A = WalletTool(test_constants)
WALLET_A_PUZZLE_HASHES = [WALLET_A.get_new_puzzlehash() for _ in range(5)]
@@ -27,12 +31,14 @@
class TestBlockchainTransactions:
@pytest.mark.asyncio
- async def test_basic_blockchain_tx(self, two_nodes):
+ async def test_basic_blockchain_tx(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
@@ -47,7 +53,8 @@ async def test_basic_blockchain_tx(self, two_nodes):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
- spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
+ assert spend_coin is not None
+ spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_puzzlehash, spend_coin)
assert spend_bundle is not None
tx: wallet_protocol.SendTransaction = wallet_protocol.SendTransaction(spend_bundle)
@@ -58,10 +65,9 @@ async def test_basic_blockchain_tx(self, two_nodes):
assert sb == spend_bundle
last_block = blocks[-1]
- next_spendbundle, additions, removals = full_node_1.mempool_manager.create_bundle_from_mempool(
- last_block.header_hash
- )
- assert next_spendbundle is not None
+ result = full_node_1.mempool_manager.create_bundle_from_mempool(last_block.header_hash)
+ assert result is not None
+ next_spendbundle, _ = result
new_blocks = bt.get_consecutive_blocks(
1,
@@ -74,7 +80,9 @@ async def test_basic_blockchain_tx(self, two_nodes):
next_block = new_blocks[-1]
await full_node_1.add_block(next_block)
- assert next_block.header_hash == full_node_1.blockchain.get_peak().header_hash
+ blockchain_peak = full_node_1.blockchain.get_peak()
+ assert blockchain_peak is not None
+ assert next_block.header_hash == blockchain_peak.header_hash
added_coins = next_spendbundle.additions()
@@ -87,12 +95,14 @@ async def test_basic_blockchain_tx(self, two_nodes):
assert not unspent.coinbase
@pytest.mark.asyncio
- async def test_validate_blockchain_with_double_spend(self, two_nodes):
+ async def test_validate_blockchain_with_double_spend(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 5
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
@@ -106,9 +116,10 @@ async def test_validate_blockchain_with_double_spend(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
+ assert spend_coin is not None
- spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
- spend_bundle_double = wallet_a.generate_signed_transaction(1001, receiver_puzzlehash, spend_coin)
+ spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_puzzlehash, spend_coin)
+ spend_bundle_double = wallet_a.generate_signed_transaction(uint64(1001), receiver_puzzlehash, spend_coin)
block_spendbundle = SpendBundle.aggregate([spend_bundle, spend_bundle_double])
@@ -124,12 +135,14 @@ async def test_validate_blockchain_with_double_spend(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, next_block, expected_error=Err.DOUBLE_SPEND)
@pytest.mark.asyncio
- async def test_validate_blockchain_duplicate_output(self, two_nodes):
+ async def test_validate_blockchain_duplicate_output(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 3
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
@@ -144,9 +157,10 @@ async def test_validate_blockchain_duplicate_output(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
+ assert spend_coin is not None
spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin, additional_outputs=[(receiver_puzzlehash, 1000)]
+ uint64(1000), receiver_puzzlehash, spend_coin, additional_outputs=[(receiver_puzzlehash, 1000)]
)
new_blocks = bt.get_consecutive_blocks(
@@ -161,12 +175,14 @@ async def test_validate_blockchain_duplicate_output(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, next_block, expected_error=Err.DUPLICATE_OUTPUT)
@pytest.mark.asyncio
- async def test_validate_blockchain_with_reorg_double_spend(self, two_nodes):
+ async def test_validate_blockchain_with_reorg_double_spend(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
@@ -180,8 +196,9 @@ async def test_validate_blockchain_with_reorg_double_spend(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
+ assert spend_coin is not None
- spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
+ spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_puzzlehash, spend_coin)
blocks_spend = bt.get_consecutive_blocks(
1,
@@ -267,14 +284,16 @@ async def test_validate_blockchain_with_reorg_double_spend(self, two_nodes):
await full_node_api_1.full_node.add_block(block)
@pytest.mark.asyncio
- async def test_validate_blockchain_spend_reorg_coin(self, two_nodes, softfork_height):
+ async def test_validate_blockchain_spend_reorg_coin(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], softfork_height: uint32
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
receiver_2_puzzlehash = WALLET_A_PUZZLE_HASHES[2]
receiver_3_puzzlehash = WALLET_A_PUZZLE_HASHES[3]
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
@@ -347,12 +366,14 @@ async def test_validate_blockchain_spend_reorg_coin(self, two_nodes, softfork_he
await full_node_api_1.full_node.add_block(new_blocks[-1])
@pytest.mark.asyncio
- async def test_validate_blockchain_spend_reorg_cb_coin(self, two_nodes):
+ async def test_validate_blockchain_spend_reorg_cb_coin(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 15
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
blocks = bt.get_consecutive_blocks(num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash)
for block in blocks:
@@ -375,7 +396,8 @@ async def test_validate_blockchain_spend_reorg_cb_coin(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
- spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
+ assert spend_coin is not None
+ spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1,
@@ -389,12 +411,14 @@ async def test_validate_blockchain_spend_reorg_cb_coin(self, two_nodes):
await full_node_api_1.full_node.add_block(new_blocks[-1])
@pytest.mark.asyncio
- async def test_validate_blockchain_spend_reorg_since_genesis(self, two_nodes):
+ async def test_validate_blockchain_spend_reorg_since_genesis(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
@@ -407,7 +431,8 @@ async def test_validate_blockchain_spend_reorg_since_genesis(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
- spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
+ assert spend_coin is not None
+ spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1, blocks, seed=b"", farmer_reward_puzzle_hash=coinbase_puzzlehash, transaction_data=spend_bundle
@@ -437,12 +462,14 @@ async def test_validate_blockchain_spend_reorg_since_genesis(self, two_nodes):
await full_node_api_1.full_node.add_block(new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_my_coin_id(self, two_nodes):
+ async def test_assert_my_coin_id(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -461,17 +488,21 @@ async def test_assert_my_coin_id(self, two_nodes):
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
+ assert spend_coin is not None
for coin in list(bad_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
bad_spend_coin = coin
+ assert bad_spend_coin is not None
valid_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [spend_coin.name()])
valid_dic = {valid_cvp.opcode: [valid_cvp]}
bad_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [bad_spend_coin.name()])
bad_dic = {bad_cvp.opcode: [bad_cvp]}
- bad_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, bad_dic)
+ bad_spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_puzzlehash, spend_coin, bad_dic)
- valid_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, valid_dic)
+ valid_spend_bundle = wallet_a.generate_signed_transaction(
+ uint64(1000), receiver_puzzlehash, spend_coin, valid_dic
+ )
assert bad_spend_bundle is not None
assert valid_spend_bundle is not None
@@ -503,13 +534,14 @@ async def test_assert_my_coin_id(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_coin_announcement_consumed(self, two_nodes):
+ async def test_assert_coin_announcement_consumed(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -528,9 +560,11 @@ async def test_assert_coin_announcement_consumed(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
+ assert spend_coin_block_2 is not None
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
@@ -539,7 +573,7 @@ async def test_assert_coin_announcement_consumed(self, two_nodes):
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
@@ -549,7 +583,7 @@ async def test_assert_coin_announcement_consumed(self, two_nodes):
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_2, block2_dic
)
# Invalid block bundle
@@ -584,13 +618,14 @@ async def test_assert_coin_announcement_consumed(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_puzzle_announcement_consumed(self, two_nodes):
+ async def test_assert_puzzle_announcement_consumed(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -609,9 +644,11 @@ async def test_assert_puzzle_announcement_consumed(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
+ assert spend_coin_block_2 is not None
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
@@ -620,7 +657,7 @@ async def test_assert_puzzle_announcement_consumed(self, two_nodes):
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
@@ -630,7 +667,7 @@ async def test_assert_puzzle_announcement_consumed(self, two_nodes):
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_2, block2_dic
)
# Invalid block bundle
@@ -665,13 +702,14 @@ async def test_assert_puzzle_announcement_consumed(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_height_absolute(self, two_nodes):
+ async def test_assert_height_absolute(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -687,12 +725,13 @@ async def test_assert_height_absolute(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
# This condition requires block1 coinbase to be spent after index 10
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(10)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
@@ -729,13 +768,14 @@ async def test_assert_height_absolute(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_height_relative(self, two_nodes):
+ async def test_assert_height_relative(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 11
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -751,6 +791,7 @@ async def test_assert_height_relative(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
# This condition requires block1 coinbase to be spent after index 11
# This condition requires block1 coinbase to be spent more than 10 block after it was farmed
@@ -758,7 +799,7 @@ async def test_assert_height_relative(self, two_nodes):
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(9)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
@@ -795,13 +836,14 @@ async def test_assert_height_relative(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_seconds_relative(self, two_nodes):
+ async def test_assert_seconds_relative(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -817,12 +859,13 @@ async def test_assert_seconds_relative(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
# This condition requires block1 coinbase to be spent 300 seconds after coin creation
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_RELATIVE, [int_to_bytes(300)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
        # program that will be sent too early
@@ -852,13 +895,14 @@ async def test_assert_seconds_relative(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, valid_new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_seconds_absolute(self, two_nodes):
+ async def test_assert_seconds_absolute(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -874,13 +918,15 @@ async def test_assert_seconds_absolute(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
# This condition requires block1 coinbase to be spent after 30 seconds from now
+ assert blocks[-1].foliage_transaction_block is not None
current_time_plus3 = uint64(blocks[-1].foliage_transaction_block.timestamp + 30)
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(current_time_plus3)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic
)
        # program that will be sent too early
@@ -910,13 +956,14 @@ async def test_assert_seconds_absolute(self, two_nodes):
await _validate_and_add_block(full_node_1.blockchain, valid_new_blocks[-1])
@pytest.mark.asyncio
- async def test_assert_fee_condition(self, two_nodes):
+ async def test_assert_fee_condition(
+ self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ ) -> None:
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
-
- full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
+ full_node_api_1, _, _, _, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
@@ -932,6 +979,7 @@ async def test_assert_fee_condition(self, two_nodes):
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
+ assert spend_coin_block_1 is not None
# This condition requires fee to be 10 mojo
cvp_fee = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)])
@@ -939,10 +987,10 @@ async def test_assert_fee_condition(self, two_nodes):
block1_dic_bad = {cvp_fee.opcode: [cvp_fee]}
block1_dic_good = {cvp_fee.opcode: [cvp_fee]}
block1_spend_bundle_bad = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic_bad, fee=9
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic_bad, fee=9
)
block1_spend_bundle_good = wallet_a.generate_signed_transaction(
- 1000, receiver_puzzlehash, spend_coin_block_1, block1_dic_good, fee=10
+ uint64(1000), receiver_puzzlehash, spend_coin_block_1, block1_dic_good, fee=10
)
log.warning(block1_spend_bundle_good.additions())
log.warning(f"Spend bundle fees: {block1_spend_bundle_good.fees()}")
diff --git a/tests/build-init-files.py b/tests/build-init-files.py
--- a/tests/build-init-files.py
+++ b/tests/build-init-files.py
@@ -13,7 +13,7 @@
import logging
import pathlib
-import sys
+from typing import List
import click
@@ -27,6 +27,31 @@
ignores = {"__pycache__", ".pytest_cache"}
+def traverse_directory(path: pathlib.Path) -> List[pathlib.Path]:
+ of_interest: List[pathlib.Path] = []
+
+ file_found = False
+
+ for member in path.iterdir():
+ if not member.is_dir():
+ file_found = True
+ continue
+
+ if member.name in ignores:
+ continue
+
+ found = traverse_directory(path=member)
+ of_interest.extend(found)
+
+ if len(found) > 0:
+ of_interest.append(member)
+
+ if len(of_interest) > 0 or file_found:
+ of_interest.append(path)
+
+ return of_interest
+
+
@click.command()
@click.option(
"-r", "--root", "root_str", type=click.Path(dir_okay=True, file_okay=False, resolve_path=True), default="."
@@ -42,12 +67,9 @@ def command(verbose, root_str):
tree_roots = ["benchmarks", "build_scripts", "chia", "tests", "tools"]
failed = False
root = pathlib.Path(root_str).resolve()
- directories = sorted(
- path
- for tree_root in tree_roots
- for path in root.joinpath(tree_root).rglob("**/")
- if all(part not in ignores for part in path.parts)
- )
+ directories = [
+ directory for tree_root in tree_roots for directory in traverse_directory(path=root.joinpath(tree_root))
+ ]
for path in directories:
init_path = path.joinpath("__init__.py")
@@ -62,7 +84,7 @@ def command(verbose, root_str):
logger.warning(f"Created : {init_path}")
else:
failed = True
- logger.error(f"Fail : present but not a regular file: {init_path}", file=sys.stderr)
+ logger.error(f"Fail : present but not a regular file: {init_path}")
if failed:
raise click.ClickException("At least one __init__.py created or not a regular file")
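A minimal usage sketch of the new traverse_directory() helper above, assuming it is importable from this script; the wrapper name, its signature, and the return shape below are illustrative and not part of the patch.

import pathlib
from typing import List

def find_missing_init_files(root: pathlib.Path, tree_roots: List[str]) -> List[pathlib.Path]:
    # Hypothetical wrapper: walk each tree root with traverse_directory() (defined above)
    # and report every directory that contains files but lacks an __init__.py.
    missing: List[pathlib.Path] = []
    for tree_root in tree_roots:
        for directory in traverse_directory(path=root.joinpath(tree_root)):
            init_path = directory.joinpath("__init__.py")
            if not init_path.is_file():
                missing.append(init_path)
    return missing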
diff --git a/tests/check_sql_statements.py b/tests/check_sql_statements.py
--- a/tests/check_sql_statements.py
+++ b/tests/check_sql_statements.py
@@ -9,7 +9,13 @@
def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = set()) -> int:
- lines = check_output(["git", "grep", f"CREATE {sql_type}"], cwd=cwd).decode("ascii").split("\n")
+    # The need for this change seems to come from the git pre-commit hook plus the Python pre-commit
+    # environment having GIT_DIR specified but not GIT_WORK_TREE. This is an issue in some less common
+    # git setups, such as with worktrees, at least in particular uses of them. We could switch to letting
+    # pre-commit provide the file list instead of reaching out to git to build that list ourselves; until
+    # we make time for that, this workaround alleviates the issue.
+ exemptions = set((cwd + "/" + file, name) for file, name in exemptions)
+ lines = check_output(["git", "grep", f"CREATE {sql_type}"]).decode("ascii").split("\n")
ret = 0
@@ -21,6 +27,8 @@ def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = set
continue
if "db_upgrade_func.py" in line:
continue
+ if not line.startswith(cwd):
+ continue
name = line.split(f"CREATE {sql_type}")[1]
if name.startswith(" IF NOT EXISTS"):
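To make the comment above concrete, a small illustration (all paths and names hypothetical) of how exemptions get rebased onto the cwd prefix and how grep hits outside cwd are filtered once git grep runs from the repository root instead of cwd:

cwd = "chia"  # hypothetical value supplied by the caller
exemptions = {("util/db.py", "peer_records")}  # hypothetical exemption entry
exemptions = {(cwd + "/" + file, name) for file, name in exemptions}
# exemptions is now {("chia/util/db.py", "peer_records")}

hits = [
    "chia/util/db.py:        CREATE TABLE peer_records(",
    "tests/util/db.py:        CREATE TABLE peer_records(",
]
in_scope = [line for line in hits if line.startswith(cwd)]
# only the first hit survives; lines outside cwd are skipped for this call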
diff --git a/tests/clvm/coin_store.py b/tests/clvm/coin_store.py
--- a/tests/clvm/coin_store.py
+++ b/tests/clvm/coin_store.py
@@ -59,7 +59,7 @@ def validate_spend_bundle(self, spend_bundle: SpendBundle, now: CoinTimestamp, m
program = simple_solution_generator(spend_bundle)
# always use the post soft-fork2 semantics
- result: NPCResult = get_name_puzzle_conditions(program, max_cost, mempool_mode=True, height=uint32(4000000))
+ result: NPCResult = get_name_puzzle_conditions(program, max_cost, mempool_mode=True, height=uint32(3886635))
if result.error is not None:
raise BadSpendBundleError(f"condition validation failure {Err(result.error)}")
diff --git a/tests/clvm/test_chialisp_deserialization.py b/tests/clvm/test_chialisp_deserialization.py
--- a/tests/clvm/test_chialisp_deserialization.py
+++ b/tests/clvm/test_chialisp_deserialization.py
@@ -6,7 +6,7 @@
from chia.util.byte_types import hexstr_to_bytes
from chia.wallet.puzzles.load_clvm import load_clvm
-DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles")
+DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clsp", package_or_requirement="chia.wallet.puzzles")
def serialized_atom_overflow(size):
diff --git a/tests/clvm/test_puzzles.py b/tests/clvm/test_puzzles.py
--- a/tests/clvm/test_puzzles.py
+++ b/tests/clvm/test_puzzles.py
@@ -8,7 +8,6 @@
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_spend import CoinSpend
-from chia.types.condition_opcodes import ConditionOpcode
from chia.types.spend_bundle import SpendBundle
from chia.util.hash import std_hash
from chia.wallet.puzzles import (
@@ -19,6 +18,7 @@
p2_m_of_n_delegate_direct,
p2_puzzle_hash,
)
+from chia.wallet.puzzles.puzzle_utils import make_create_coin_condition
from tests.util.key_tool import KeyTool
from ..core.make_block_generator import int_to_public_key
@@ -101,14 +101,10 @@ def default_payments_and_conditions(
(throwaway_puzzle_hash(initial_index + 1, key_lookup), initial_index * 10),
(throwaway_puzzle_hash(initial_index + 2, key_lookup), (initial_index + 1) * 10),
]
- conditions = Program.to([make_create_coin_condition(ph, amount) for ph, amount in payments])
+ conditions = Program.to([make_create_coin_condition(ph, amount, []) for ph, amount in payments])
return payments, conditions
-def make_create_coin_condition(puzzle_hash, amount):
- return Program.to([ConditionOpcode.CREATE_COIN, puzzle_hash, amount])
-
-
class TestPuzzles(TestCase):
def test_p2_conditions(self):
key_lookup = KeyTool()
diff --git a/tests/clvm/test_serialized_program.py b/tests/clvm/test_serialized_program.py
--- a/tests/clvm/test_serialized_program.py
+++ b/tests/clvm/test_serialized_program.py
@@ -6,7 +6,7 @@
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.wallet.puzzles.load_clvm import load_clvm
-SHA256TREE_MOD = load_clvm("sha256tree_module.clvm")
+SHA256TREE_MOD = load_clvm("sha256tree_module.clsp")
# TODO: test multiple args
diff --git a/tests/clvm/test_singletons.py b/tests/clvm/test_singletons.py
--- a/tests/clvm/test_singletons.py
+++ b/tests/clvm/test_singletons.py
@@ -23,11 +23,11 @@
"""
This test suite aims to test:
- chia.wallet.puzzles.singleton_top_layer.py
- - chia.wallet.puzzles.singleton_top_layer.clvm
+ - chia.wallet.puzzles.singleton_top_layer.clsp
- chia.wallet.puzzles.singleton_top_layer_v1_1.py
- - chia.wallet.puzzles.singleton_top_layer_v1_1.clvm
- - chia.wallet.puzzles.p2_singleton.clvm
- - chia.wallet.puzzles.p2_singleton_or_delayed_puzhash.clvm
+ - chia.wallet.puzzles.singleton_top_layer_v1_1.clsp
+ - chia.wallet.puzzles.p2_singleton.clsp
+ - chia.wallet.puzzles.p2_singleton_or_delayed_puzhash.clsp
"""
diff --git a/tests/clvm/test_spend_sim.py b/tests/clvm/test_spend_sim.py
--- a/tests/clvm/test_spend_sim.py
+++ b/tests/clvm/test_spend_sim.py
@@ -128,8 +128,8 @@ async def test_all_endpoints(self):
assert removals
# get_puzzle_and_solution
- coin_solution = await sim_client.get_puzzle_and_solution(spendable_coin.name(), latest_block.height)
- assert coin_solution
+ coin_spend = await sim_client.get_puzzle_and_solution(spendable_coin.name(), latest_block.height)
+ assert coin_spend == bundle.coin_spends[0]
# get_coin_records_by_parent_ids
new_coin = next(x.coin for x in additions if x.coin.puzzle_hash == puzzle_hash)
diff --git a/tests/cmds/test_sim.py b/tests/cmds/test_sim.py
new file mode 100644
--- /dev/null
+++ b/tests/cmds/test_sim.py
@@ -0,0 +1,142 @@
+from __future__ import annotations
+
+from pathlib import Path
+from shutil import rmtree
+
+from click.testing import CliRunner, Result
+
+from chia.cmds.chia import cli
+from chia.util.default_root import SIMULATOR_ROOT_PATH
+
+mnemonic = ( # ignore any secret warnings
+ "cup smoke miss park baby say island tomorrow segment lava bitter easily settle gift renew arrive kangaroo dilemma "
+ "organ skin design salt history awesome"
+)
+fingerprint = 2640131813
+std_farming_address = "txch1mh4qanzyawn3v4uphgaj2cg6hrjazwyp0sx653fhn9apg6mfajlqtj0ztp"
+burn_address = "txch1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqm6ksh7qddh" # 0x0...dead
+
+SIMULATOR_ROOT_PATH.mkdir(parents=True, exist_ok=True) # this simplifies code later
+
+
+def get_profile_path(starting_string: str) -> str:
+ """
+ Returns the name of a profile that does not exist yet.
+ """
+ i = 0
+ while Path(SIMULATOR_ROOT_PATH / (starting_string + str(i))).exists():
+ i += 1
+ return starting_string + str(i)
+
+
+def test_every_simulator_command() -> None:
+ starting_str = "ci_test"
+ simulator_name = get_profile_path(starting_str)
+ runner: CliRunner = CliRunner()
+ address = std_farming_address
+ start_result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "create", "-bm", mnemonic])
+ assert start_result.exit_code == 0
+ assert f"Farming & Prefarm reward address: {address}" in start_result.output
+ assert "chia_full_node_simulator: started" in start_result.output
+ assert "Genesis block generated, exiting." in start_result.output
+ try:
+ # run all tests
+ run_all_tests(runner, address, simulator_name)
+ finally:
+ stop_simulator(runner, simulator_name)
+
+
+def test_custom_farming_address() -> None:
+ runner: CliRunner = CliRunner()
+ address = burn_address
+ starting_str = "ci_address_test"
+ simulator_name = get_profile_path(starting_str)
+ start_result: Result = runner.invoke(
+ cli, ["dev", "sim", "-n", simulator_name, "create", "-bm", mnemonic, "--reward-address", address]
+ )
+ assert start_result.exit_code == 0
+ assert f"Farming & Prefarm reward address: {address}" in start_result.output
+ assert "chia_full_node_simulator: started" in start_result.output
+ assert "Genesis block generated, exiting." in start_result.output
+
+ try:
+ # just run status test
+ _test_sim_status(runner, address, simulator_name)
+ finally:
+ stop_simulator(runner, simulator_name)
+
+
+def stop_simulator(runner: CliRunner, simulator_name: str) -> None:
+ """Stop simulator."""
+ result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "stop", "-d"])
+ assert result.exit_code == 0
+ assert "chia_full_node_simulator: Stopped\nDaemon stopped\n" == result.output
+ rmtree(SIMULATOR_ROOT_PATH / simulator_name)
+
+
+def run_all_tests(runner: CliRunner, address: str, simulator_name: str) -> None:
+ """Run all tests."""
+ _test_sim_status(runner, address, simulator_name)
+ _test_farm_and_revert_block(runner, address, simulator_name)
+
+
+def _test_sim_status(runner: CliRunner, address: str, simulator_name: str) -> None:
+ # show everything
+ result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "status", "--show-key", "-cia"])
+ assert result.exit_code == 0
+ # asserts are grouped by arg
+ assert f"Fingerprint: {fingerprint}" and f"Mnemonic seed (24 secret words):\n{mnemonic}" in result.output # -k
+
+ assert "Network: simulator0" and "Current Blockchain Status: Full Node Synced" in result.output # default
+ assert "Height: 1" in result.output # default
+ assert f"Current Farming address: {address}, with a balance of: 21000000.0 TXCH." in result.output # default
+
+ assert (
+ f"Address: {address} has a balance of: 21000000000000000000 mojo, with a total of: 2 transactions."
+ in result.output
+ ) # -ia
+ assert "Coin Amount: 2625000000000000000 mojo" in result.output # -ic
+
+
+def _test_farm_and_revert_block(runner: CliRunner, address: str, simulator_name: str) -> None:
+ # make 5 blocks
+ five_blocks_result: Result = runner.invoke(
+ cli, ["dev", "sim", "-n", simulator_name, "farm", "-b", "5", "-a", address]
+ )
+ assert five_blocks_result.exit_code == 0
+ assert "Farmed 5 Transaction blocks" in five_blocks_result.output
+
+ # check that height increased
+ five_blocks_check: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "status"])
+ assert five_blocks_check.exit_code == 0
+ assert "Height: 6" in five_blocks_check.output
+
+ # do a reorg, 3 blocks back, 2 blocks forward, height now 8
+ reorg_result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "revert", "-b", "3", "-n", "2"])
+ assert reorg_result.exit_code == 0
+ assert "Block: 3 and above " and "Block Height is now: 8" in reorg_result.output
+
+ # check that height changed by 2
+ reorg_check: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "status"])
+ assert reorg_check.exit_code == 0
+ assert "Height: 8" in reorg_check.output
+
+ # do a forceful reorg 4 blocks back
+ forced_reorg_result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "revert", "-b", "4", "-fd"])
+ assert forced_reorg_result.exit_code == 0
+ assert "Block: 8 and above were successfully deleted" and "Block Height is now: 4" in forced_reorg_result.output
+
+ # check that height changed by 4
+ forced_reorg_check: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "status"])
+ assert forced_reorg_check.exit_code == 0
+ assert "Height: 4" in forced_reorg_check.output
+
+ # test chain reset to genesis
+ genesis_reset_result: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "revert", "-fd", "--reset"])
+ assert genesis_reset_result.exit_code == 0
+ assert "Block: 2 and above were successfully deleted" and "Block Height is now: 1" in genesis_reset_result.output
+
+ # check that height changed to 1
+ genesis_reset_check: Result = runner.invoke(cli, ["dev", "sim", "-n", simulator_name, "status"])
+ assert genesis_reset_check.exit_code == 0
+ assert "Height: 1" in genesis_reset_check.output
diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -124,17 +124,17 @@ async def empty_blockchain(request):
@pytest.fixture(scope="function")
-def latest_db_version():
+def latest_db_version() -> int:
return 2
@pytest.fixture(scope="function", params=[1, 2])
-def db_version(request):
+def db_version(request) -> int:
return request.param
-@pytest.fixture(scope="function", params=[1000000, 3630000, 4000000])
-def softfork_height(request):
+@pytest.fixture(scope="function", params=[1000000, 3886635, 4200000, 5496000])
+def softfork_height(request) -> int:
return request.param
diff --git a/tests/connection_utils.py b/tests/connection_utils.py
--- a/tests/connection_utils.py
+++ b/tests/connection_utils.py
@@ -62,7 +62,6 @@ async def add_dummy_connection(
log,
True,
server.received_message_callback,
- self_hostname,
None,
peer_id,
100,
diff --git a/tests/core/daemon/test_daemon.py b/tests/core/daemon/test_daemon.py
--- a/tests/core/daemon/test_daemon.py
+++ b/tests/core/daemon/test_daemon.py
@@ -21,10 +21,24 @@
from chia.simulator.time_out_assert import time_out_assert, time_out_assert_custom_interval
from chia.types.peer_info import PeerInfo
from chia.util.ints import uint16
-from chia.util.keychain import Keychain, KeyData
+from chia.util.keychain import Keychain, KeyData, supports_os_passphrase_storage
from chia.util.keyring_wrapper import DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE
from chia.util.ws_message import create_payload
from tests.core.node_height import node_height_at_least
+from tests.util.misc import Marks, datacases
+
+
+@dataclass
+class RouteCase:
+ route: str
+ description: str
+ request: Dict[str, Any]
+ response: Dict[str, Any]
+ marks: Marks = ()
+
+ @property
+ def id(self) -> str:
+ return f"{self.route}: {self.description}"
# Simple class that responds to a poll() call used by WebSocketServer.is_running()
@@ -809,6 +823,64 @@ async def test_bad_json(daemon_connection_and_temp_keychain: Tuple[aiohttp.Clien
assert message["data"]["error"].startswith("Expecting property name")
+@datacases(
+ RouteCase(
+ route="register_service",
+ description="no service name",
+ request={
+ "fred": "barney",
+ },
+ response={"success": False},
+ ),
+ RouteCase(
+ route="register_service",
+ description="chia_plotter",
+ request={
+ "service": "chia_plotter",
+ },
+ response={"success": True, "service": "chia_plotter", "queue": []},
+ ),
+ RouteCase(
+ route="unknown_command",
+ description="non-existant route",
+ request={},
+ response={"success": False, "error": "unknown_command unknown_command"},
+ ),
+ RouteCase(
+ route="running_services",
+ description="successful",
+ request={},
+ response={"success": True, "running_services": []},
+ ),
+ RouteCase(
+ route="keyring_status",
+ description="successful",
+ request={},
+ response={
+ "can_save_passphrase": supports_os_passphrase_storage(),
+ "can_set_passphrase_hint": True,
+ "is_keyring_locked": False,
+ "passphrase_hint": "",
+ "passphrase_requirements": {"is_optional": True, "min_length": 8},
+ "success": True,
+ "user_passphrase_is_set": False,
+ },
+ ),
+)
+@pytest.mark.asyncio
+async def test_misc_daemon_ws(
+ daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain],
+ case: RouteCase,
+) -> None:
+ ws, _ = daemon_connection_and_temp_keychain
+
+ payload = create_payload(case.route, case.request, "service_name", "daemon")
+ await ws.send_str(payload)
+ response = await ws.receive()
+
+ assert_response(response, case.response)
+
+
@pytest.mark.asyncio
async def test_unexpected_json(
daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain]
@@ -823,3 +895,215 @@ async def test_unexpected_json(
message = json.loads(response.data.strip())
assert message["data"]["success"] is False
assert message["data"]["error"].startswith("'command'")
+
+
+@pytest.mark.parametrize(
+ "command_to_test",
+ [("start_service"), ("stop_service"), ("start_plotting"), ("stop_plotting"), ("is_running"), ("register_service")],
+)
+@pytest.mark.asyncio
+async def test_commands_with_no_data(
+ daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], command_to_test: str
+) -> None:
+ ws, _ = daemon_connection_and_temp_keychain
+
+ payload = create_payload(command_to_test, {}, "service_name", "daemon")
+
+ await ws.send_str(payload)
+ response = await ws.receive()
+
+ assert_response(response, {"success": False, "error": f'{command_to_test} requires "data"'})
+
+
+@datacases(
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="no passphrase",
+ request={
+ "passphrase_hint": "this is a hint",
+ "save_passphrase": False,
+ },
+ response={"success": False, "error": "missing new_passphrase"},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="incorrect type",
+ request={
+ "passphrase_hint": "this is a hint",
+ "save_passphrase": False,
+ "new_passphrase": True,
+ },
+ response={"success": False, "error": "missing new_passphrase"},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="correct",
+ request={
+ "passphrase_hint": "this is a hint",
+ "new_passphrase": "this is a passphrase",
+ },
+ response={"success": True, "error": None},
+ ),
+)
+@pytest.mark.asyncio
+async def test_set_keyring_passphrase_ws(
+ daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain],
+ case: RouteCase,
+) -> None:
+ ws, _ = daemon_connection_and_temp_keychain
+
+ payload = create_payload(case.route, case.request, "service_name", "daemon")
+ await ws.send_str(payload)
+ response = await ws.receive()
+
+ assert_response(response, case.response)
+
+
+@datacases(
+ RouteCase(
+ route="remove_keyring_passphrase",
+ description="wrong current passphrase",
+ request={"current_passphrase": "wrong passphrase"},
+ response={"success": False, "error": "current passphrase is invalid"},
+ ),
+ RouteCase(
+ route="remove_keyring_passphrase",
+ description="incorrect type",
+ request={"current_passphrase": True},
+ response={"success": False, "error": "missing current_passphrase"},
+ ),
+ RouteCase(
+ route="remove_keyring_passphrase",
+ description="missing current passphrase",
+ request={},
+ response={"success": False, "error": "missing current_passphrase"},
+ ),
+ RouteCase(
+ route="remove_keyring_passphrase",
+ description="correct",
+ request={"current_passphrase": "this is a passphrase"},
+ response={"success": True, "error": None},
+ ),
+ RouteCase(
+ route="unlock_keyring",
+ description="wrong current passphrase",
+ request={"key": "wrong passphrase"},
+ response={"success": False, "error": "bad passphrase"},
+ ),
+ RouteCase(
+ route="unlock_keyring",
+ description="incorrect type",
+ request={"key": True},
+ response={"success": False, "error": "missing key"},
+ ),
+ RouteCase(
+ route="unlock_keyring",
+ description="missing data",
+ request={},
+ response={"success": False, "error": "missing key"},
+ ),
+ RouteCase(
+ route="unlock_keyring",
+ description="correct",
+ request={"key": "this is a passphrase"},
+ response={"success": True, "error": None},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="no current passphrase",
+ request={
+ "save_passphrase": False,
+ "new_passphrase": "another new passphrase",
+ },
+ response={"success": False, "error": "missing current_passphrase"},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="incorrect current passphrase",
+ request={
+ "save_passphrase": False,
+ "current_passphrase": "none",
+ "new_passphrase": "another new passphrase",
+ },
+ response={"success": False, "error": "current passphrase is invalid"},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="incorrect type",
+ request={
+ "save_passphrase": False,
+ "current_passphrase": False,
+ "new_passphrase": "another new passphrase",
+ },
+ response={"success": False, "error": "missing current_passphrase"},
+ ),
+ RouteCase(
+ route="set_keyring_passphrase",
+ description="correct",
+ request={
+ "save_passphrase": False,
+ "current_passphrase": "this is a passphrase",
+ "new_passphrase": "another new passphrase",
+ },
+ response={"success": True, "error": None},
+ ),
+)
+@pytest.mark.asyncio
+async def test_passphrase_apis(
+ daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain],
+ case: RouteCase,
+) -> None:
+ ws, keychain = daemon_connection_and_temp_keychain
+
+ keychain.set_master_passphrase(
+ current_passphrase=DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE, new_passphrase="this is a passphrase"
+ )
+
+ payload = create_payload(
+ case.route,
+ case.request,
+ "service_name",
+ "daemon",
+ )
+ await ws.send_str(payload)
+ response = await ws.receive()
+
+ assert_response(response, case.response)
+
+
+@datacases(
+ RouteCase(
+ route="unlock_keyring",
+ description="exception",
+ request={"key": "this is a passphrase"},
+ response={"success": False, "error": "validation exception"},
+ ),
+ RouteCase(
+ route="validate_keyring_passphrase",
+ description="exception",
+ request={"key": "this is a passphrase"},
+ response={"success": False, "error": "validation exception"},
+ ),
+)
+@pytest.mark.asyncio
+async def test_keyring_file_deleted(
+ daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain],
+ case: RouteCase,
+) -> None:
+ ws, keychain = daemon_connection_and_temp_keychain
+
+ keychain.set_master_passphrase(
+ current_passphrase=DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE, new_passphrase="this is a passphrase"
+ )
+ keychain.keyring_wrapper.keyring.keyring_path.unlink()
+
+ payload = create_payload(
+ case.route,
+ case.request,
+ "service_name",
+ "daemon",
+ )
+ await ws.send_str(payload)
+ response = await ws.receive()
+
+ assert_response(response, case.response)
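The RouteCase/datacases pattern used throughout this file comes from tests/util/misc.py, which is not shown in this diff. Purely as an assumption consistent with the usage above, a datacases-style helper could be as small as:

import pytest

def datacases(*cases):
    # Hypothetical sketch, not the real tests/util/misc.py implementation: expand each
    # dataclass instance into one pytest parameter named "case", using its `id` property
    # as the test id and applying its `marks` tuple.
    return pytest.mark.parametrize(
        argnames="case",
        argvalues=[pytest.param(case, id=case.id, marks=case.marks) for case in cases],
    )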
diff --git a/tests/core/daemon/test_daemon_register.py b/tests/core/daemon/test_daemon_register.py
--- a/tests/core/daemon/test_daemon_register.py
+++ b/tests/core/daemon/test_daemon_register.py
@@ -24,7 +24,6 @@ async def test_multiple_register_same(get_daemon: WebSocketServer, bt: BlockTool
ssl_context=bt.get_daemon_ssl_context(),
max_msg_size=100 * 1024 * 1024,
) as ws:
-
service_name = "test_service"
data = {"service": service_name}
payload = create_payload("register_service", data, service_name, "daemon")
@@ -52,7 +51,6 @@ async def test_multiple_register_different(get_daemon: WebSocketServer, bt: Bloc
ssl_context=bt.get_daemon_ssl_context(),
max_msg_size=100 * 1024 * 1024,
) as ws:
-
test_service_names = ["service1", "service2", "service3"]
for service_name in test_service_names:
@@ -89,7 +87,6 @@ async def test_remove_connection(get_daemon: WebSocketServer, bt: BlockTools) ->
ssl_context=bt.get_daemon_ssl_context(),
max_msg_size=100 * 1024 * 1024,
) as ws:
-
test_service_names = ["service1", "service2", "service3", "service4", "service5"]
for service_name in test_service_names:
diff --git a/tests/core/data_layer/test_data_rpc.py b/tests/core/data_layer/test_data_rpc.py
--- a/tests/core/data_layer/test_data_rpc.py
+++ b/tests/core/data_layer/test_data_rpc.py
@@ -53,7 +53,9 @@ async def init_data_layer(
config["data_layer"]["rpc_port"] = 0
config["data_layer"]["database_path"] = str(db_path.joinpath("db.sqlite"))
save_config(bt.root_path, "config.yaml", config)
- service = create_data_layer_service(root_path=bt.root_path, config=config, wallet_service=wallet_service)
+ service = create_data_layer_service(
+ root_path=bt.root_path, config=config, wallet_service=wallet_service, downloaders=[], uploaders=[]
+ )
await service.start()
try:
yield service._api.data_layer
diff --git a/tests/core/full_node/test_full_node.py b/tests/core/full_node/test_full_node.py
--- a/tests/core/full_node/test_full_node.py
+++ b/tests/core/full_node/test_full_node.py
@@ -46,6 +46,7 @@
from chia.util.errors import ConsensusError, Err
from chia.util.hash import std_hash
from chia.util.ints import uint8, uint16, uint32, uint64
+from chia.util.limited_semaphore import LimitedSemaphore
from chia.util.recursive_replace import recursive_replace
from chia.util.vdf_prover import get_vdf_info_and_proof
from chia.wallet.transaction_record import TransactionRecord
@@ -1903,6 +1904,61 @@ async def test_compact_protocol_invalid_messages(self, setup_two_nodes_fixture,
assert not block.challenge_chain_sp_proof.normalized_to_identity
assert not block.challenge_chain_ip_proof.normalized_to_identity
+ @pytest.mark.asyncio
+ async def test_respond_compact_proof_message_limit(self, setup_two_nodes_fixture):
+ nodes, _, bt = setup_two_nodes_fixture
+ full_node_1 = nodes[0]
+ full_node_2 = nodes[1]
+ NUM_BLOCKS = 20
+ # We don't compactify the last 5 blocks.
+ EXPECTED_COMPACTIFIED = NUM_BLOCKS - 5
+ blocks = bt.get_consecutive_blocks(num_blocks=NUM_BLOCKS)
+ finished_compact_proofs = []
+ for block in blocks:
+ await full_node_1.full_node.add_block(block)
+ await full_node_2.full_node.add_block(block)
+ vdf_info, vdf_proof = get_vdf_info_and_proof(
+ test_constants,
+ ClassgroupElement.get_default_element(),
+ block.reward_chain_block.challenge_chain_ip_vdf.challenge,
+ block.reward_chain_block.challenge_chain_ip_vdf.number_of_iterations,
+ True,
+ )
+ finished_compact_proofs.append(
+ timelord_protocol.RespondCompactProofOfTime(
+ vdf_info,
+ vdf_proof,
+ block.header_hash,
+ block.height,
+ CompressibleVDFField.CC_IP_VDF,
+ )
+ )
+
+ async def coro(full_node, compact_proof):
+ await full_node.respond_compact_proof_of_time(compact_proof)
+
+ full_node_1.full_node._compact_vdf_sem = LimitedSemaphore.create(active_limit=1, waiting_limit=2)
+ tasks = asyncio.gather(
+ *[coro(full_node_1, respond_compact_proof) for respond_compact_proof in finished_compact_proofs]
+ )
+ await tasks
+ stored_blocks = await full_node_1.get_all_full_blocks()
+ compactified = 0
+ for block in stored_blocks:
+ if block.challenge_chain_ip_proof.normalized_to_identity:
+ compactified += 1
+ assert compactified == 3
+
+ # The other full node receives the compact messages one at a time.
+ for respond_compact_proof in finished_compact_proofs:
+ await full_node_2.full_node.add_compact_proof_of_time(respond_compact_proof)
+ stored_blocks = await full_node_2.get_all_full_blocks()
+ compactified = 0
+ for block in stored_blocks:
+ if block.challenge_chain_ip_proof.normalized_to_identity:
+ compactified += 1
+ assert compactified == EXPECTED_COMPACTIFIED
+
@pytest.mark.parametrize(
argnames=["custom_capabilities", "expect_success"],
argvalues=[
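The new test_respond_compact_proof_message_limit above relies on chia.util.limited_semaphore.LimitedSemaphore admitting only active_limit running plus waiting_limit queued callers, which is why just 3 of the 20 concurrent proofs end up compactified. A rough sketch of that assumed behavior follows; the real class and its rejection handling may differ.

import asyncio
from contextlib import asynccontextmanager

class LimitedSemaphoreSketch:
    # Hypothetical stand-in for chia.util.limited_semaphore.LimitedSemaphore: up to
    # active_limit tasks run at once, up to waiting_limit tasks may wait, and anything
    # beyond that is rejected instead of queueing unboundedly.
    def __init__(self, active_limit: int, waiting_limit: int) -> None:
        self._semaphore = asyncio.Semaphore(active_limit)
        self._available = active_limit + waiting_limit

    @classmethod
    def create(cls, active_limit: int, waiting_limit: int) -> "LimitedSemaphoreSketch":
        return cls(active_limit=active_limit, waiting_limit=waiting_limit)

    @asynccontextmanager
    async def acquire(self):
        if self._available <= 0:
            raise RuntimeError("no free or waiting slot available")
        self._available -= 1
        try:
            async with self._semaphore:
                yield
        finally:
            self._available += 1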
diff --git a/tests/core/mempool/test_mempool.py b/tests/core/mempool/test_mempool.py
--- a/tests/core/mempool/test_mempool.py
+++ b/tests/core/mempool/test_mempool.py
@@ -2,6 +2,7 @@
import dataclasses
import logging
+import random
from typing import Callable, Dict, List, Optional, Tuple
import pytest
@@ -15,7 +16,7 @@
from chia.full_node.fee_estimation import EmptyMempoolInfo, MempoolInfo
from chia.full_node.full_node_api import FullNodeAPI
from chia.full_node.mempool import Mempool
-from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
+from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, get_puzzle_and_solution_for_coin
from chia.full_node.mempool_manager import MEMPOOL_MIN_FEE_INCREASE
from chia.full_node.pending_tx_cache import ConflictTxCache, PendingTxCache
from chia.protocols import full_node_protocol, wallet_protocol
@@ -27,13 +28,14 @@
from chia.simulator.wallet_tools import WalletTool
from chia.types.announcement import Announcement
from chia.types.blockchain_format.coin import Coin
-from chia.types.blockchain_format.program import INFINITE_COST, Program
+from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32, bytes48
from chia.types.clvm_cost import CLVMCost
from chia.types.coin_spend import CoinSpend
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
+from chia.types.eligible_coin_spends import run_for_cost
from chia.types.fee_rate import FeeRate
from chia.types.generator_types import BlockGenerator
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
@@ -41,18 +43,21 @@
from chia.types.spend_bundle import SpendBundle
from chia.types.spend_bundle_conditions import Spend, SpendBundleConditions
from chia.util.api_decorators import api_request
-from chia.util.condition_tools import (
- conditions_for_solution,
- parse_sexp_to_conditions,
- pkm_pairs,
- pkm_pairs_for_conditions_dict,
-)
+from chia.util.condition_tools import parse_sexp_to_conditions, pkm_pairs, pkm_pairs_for_conditions_dict
from chia.util.errors import ConsensusError, Err
from chia.util.hash import std_hash
from chia.util.ints import uint32, uint64
from chia.util.recursive_replace import recursive_replace
from tests.blockchain.blockchain_test_utils import _validate_and_add_block
from tests.connection_utils import add_dummy_connection, connect_and_get_peer
+from tests.core.mempool.test_mempool_manager import (
+ IDENTITY_PUZZLE_HASH,
+ TEST_COIN,
+ make_test_coins,
+ mempool_item_from_spendbundle,
+ mk_item,
+ spend_bundle_from_conditions,
+)
from tests.core.node_height import node_height_at_least
from tests.util.misc import assert_runtime
@@ -1690,14 +1695,11 @@ async def test_agg_sig_condition(self, one_node_one_block, wallet_a):
unsigned: List[CoinSpend] = spend_bundle_0.coin_spends
assert len(unsigned) == 1
- coin_spend: CoinSpend = unsigned[0]
-
- err, con, cost = conditions_for_solution(coin_spend.puzzle_reveal, coin_spend.solution, INFINITE_COST)
- assert con is not None
+ # coin_spend: CoinSpend = unsigned[0]
# TODO(straya): fix this test
# puzzle, solution = list(coin_spend.solution.as_iter())
- # conditions_dict = conditions_by_opcode(con)
+ # conditions_dict = conditions_dict_for_solution(coin_spend.puzzle_reveal, coin_spend.solution, INFINITE_COST)
# pkm_pairs = pkm_pairs_for_conditions_dict(conditions_dict, coin_spend.coin.name())
# assert len(pkm_pairs) == 1
@@ -2458,7 +2460,7 @@ def test_create_coin_duplicates(self, request: pytest.FixtureRequest, softfork_h
# duplicate
condition = CREATE_COIN.format(num=600000)
- with assert_runtime(seconds=0.8, label=request.node.name):
+ with assert_runtime(seconds=1, label=request.node.name):
npc_result = generator_condition_tester(condition, quote=False, height=softfork_height)
assert npc_result.error == Err.DUPLICATE_OUTPUT.value
@@ -2523,21 +2525,21 @@ class TestPkmPairs:
pk1 = G1Element.generator()
pk2 = G1Element.generator()
- def test_empty_list(self, softfork):
+ def test_empty_list(self):
conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0)
- pks, msgs = pkm_pairs(conds, b"foobar", soft_fork=softfork)
+ pks, msgs = pkm_pairs(conds, b"foobar")
assert pks == []
assert msgs == []
- def test_no_agg_sigs(self, softfork):
+ def test_no_agg_sigs(self):
# one create coin: h1 amount: 1 and not hint
spends = [Spend(self.h3, self.h4, None, None, None, None, None, None, [(self.h1, 1, b"")], [], 0)]
conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], 0, 0, 0)
- pks, msgs = pkm_pairs(conds, b"foobar", soft_fork=softfork)
+ pks, msgs = pkm_pairs(conds, b"foobar")
assert pks == []
assert msgs == []
- def test_agg_sig_me(self, softfork):
+ def test_agg_sig_me(self):
spends = [
Spend(
self.h1,
@@ -2554,22 +2556,22 @@ def test_agg_sig_me(self, softfork):
)
]
conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], 0, 0, 0)
- pks, msgs = pkm_pairs(conds, b"foobar", soft_fork=softfork)
+ pks, msgs = pkm_pairs(conds, b"foobar")
assert [bytes(pk) for pk in pks] == [bytes(self.pk1), bytes(self.pk2)]
assert msgs == [b"msg1" + self.h1 + b"foobar", b"msg2" + self.h1 + b"foobar"]
- def test_agg_sig_unsafe(self, softfork):
+ def test_agg_sig_unsafe(self):
conds = SpendBundleConditions(
[], 0, 0, 0, None, None, [(bytes48(self.pk1), b"msg1"), (bytes48(self.pk2), b"msg2")], 0, 0, 0
)
- pks, msgs = pkm_pairs(conds, b"foobar", soft_fork=softfork)
+ pks, msgs = pkm_pairs(conds, b"foobar")
assert [bytes(pk) for pk in pks] == [bytes(self.pk1), bytes(self.pk2)]
assert msgs == [b"msg1", b"msg2"]
- def test_agg_sig_mixed(self, softfork):
+ def test_agg_sig_mixed(self):
spends = [Spend(self.h1, self.h2, None, None, None, None, None, None, [], [(bytes48(self.pk1), b"msg1")], 0)]
conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [(bytes48(self.pk2), b"msg2")], 0, 0, 0)
- pks, msgs = pkm_pairs(conds, b"foobar", soft_fork=softfork)
+ pks, msgs = pkm_pairs(conds, b"foobar")
assert [bytes(pk) for pk in pks] == [bytes(self.pk2), bytes(self.pk1)]
assert msgs == [b"msg2", b"msg1" + self.h1 + b"foobar"]
@@ -2577,25 +2579,17 @@ def test_agg_sig_unsafe_restriction(self) -> None:
conds = SpendBundleConditions(
[], 0, 0, 0, None, None, [(bytes48(self.pk1), b"msg1"), (bytes48(self.pk2), b"msg2")], 0, 0, 0
)
- pks, msgs = pkm_pairs(conds, b"msg1", soft_fork=False)
- assert [bytes(pk) for pk in pks] == [bytes(self.pk1), bytes(self.pk2)]
- assert msgs == [b"msg1", b"msg2"]
-
- pks, msgs = pkm_pairs(conds, b"msg2", soft_fork=False)
- assert [bytes(pk) for pk in pks] == [bytes(self.pk1), bytes(self.pk2)]
- assert msgs == [b"msg1", b"msg2"]
-
with pytest.raises(ConsensusError, match="INVALID_CONDITION"):
- pkm_pairs(conds, b"msg1", soft_fork=True)
+ pkm_pairs(conds, b"msg1")
with pytest.raises(ConsensusError, match="INVALID_CONDITION"):
- pkm_pairs(conds, b"sg1", soft_fork=True)
+ pkm_pairs(conds, b"sg1")
with pytest.raises(ConsensusError, match="INVALID_CONDITION"):
- pkm_pairs(conds, b"msg2", soft_fork=True)
+ pkm_pairs(conds, b"msg2")
with pytest.raises(ConsensusError, match="INVALID_CONDITION"):
- pkm_pairs(conds, b"g2", soft_fork=True)
+ pkm_pairs(conds, b"g2")
class TestPkmPairsForConditionDict:
@@ -2626,36 +2620,328 @@ def test_agg_sig_unsafe_restriction(self) -> None:
class TestParseSexpCondition:
def test_basic(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"foo", b"bar"]]))
- assert err is None
+ conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"foo", b"bar"]]))
assert conds == [ConditionWithArgs(ConditionOpcode.AGG_SIG_UNSAFE, [b"foo", b"bar"])]
def test_oversized_op(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[bytes([49, 49]), b"foo", b"bar"]]))
- assert err is Err.INVALID_CONDITION
- assert conds is None
+ with pytest.raises(ConsensusError):
+ parse_sexp_to_conditions(Program.to([[bytes([49, 49]), b"foo", b"bar"]]))
def test_empty_op(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[b"", b"foo", b"bar"]]))
- assert err is Err.INVALID_CONDITION
- assert conds is None
+ with pytest.raises(ConsensusError):
+ parse_sexp_to_conditions(Program.to([[b"", b"foo", b"bar"]]))
def test_list_op(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[[bytes([49])], b"foo", b"bar"]]))
- assert err is Err.INVALID_CONDITION
- assert conds is None
+ with pytest.raises(ConsensusError):
+ parse_sexp_to_conditions(Program.to([[[bytes([49])], b"foo", b"bar"]]))
def test_list_arg(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[bytes([49]), [b"foo", b"bar"]]]))
- assert err is None
+ conds = parse_sexp_to_conditions(Program.to([[bytes([49]), [b"foo", b"bar"]]]))
assert conds == [ConditionWithArgs(ConditionOpcode.AGG_SIG_UNSAFE, [])]
def test_list_arg_truncate(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"baz", [b"foo", b"bar"]]]))
- assert err is None
+ conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"baz", [b"foo", b"bar"]]]))
assert conds == [ConditionWithArgs(ConditionOpcode.AGG_SIG_UNSAFE, [b"baz"])]
def test_arg_limit(self) -> None:
- err, conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"1", b"2", b"3", b"4", b"5", b"6"]]))
- assert err is None
+ conds = parse_sexp_to_conditions(Program.to([[bytes([49]), b"1", b"2", b"3", b"4", b"5", b"6"]]))
assert conds == [ConditionWithArgs(ConditionOpcode.AGG_SIG_UNSAFE, [b"1", b"2", b"3", b"4"])]
+
+
+coins = make_test_coins()
+
+
+# This test makes sure we're properly sorting items by fee rate
+@pytest.mark.parametrize(
+ "items,expected",
+ [
+ # make sure fractions of fee-rate are ordered correctly (i.e. that
+ # we don't use integer division)
+ (
+ [
+ mk_item(coins[0:1], fee=110, cost=50),
+ mk_item(coins[1:2], fee=100, cost=50),
+ mk_item(coins[2:3], fee=105, cost=50),
+ ],
+ [coins[0], coins[2], coins[1]],
+ ),
+ # make sure insertion order is a tie-breaker for items with the same
+ # fee-rate
+ (
+ [
+ mk_item(coins[0:1], fee=100, cost=50),
+ mk_item(coins[1:2], fee=100, cost=50),
+ mk_item(coins[2:3], fee=100, cost=50),
+ ],
+ [coins[0], coins[1], coins[2]],
+ ),
+ # also for items that don't pay fees
+ (
+ [
+ mk_item(coins[2:3], fee=0, cost=50),
+ mk_item(coins[1:2], fee=0, cost=50),
+ mk_item(coins[0:1], fee=0, cost=50),
+ ],
+ [coins[2], coins[1], coins[0]],
+ ),
+ ],
+)
+def test_items_by_feerate(items: List[MempoolItem], expected: List[Coin]) -> None:
+ fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
+
+ mempool_info = MempoolInfo(
+ CLVMCost(uint64(11000000000 * 3)),
+ FeeRate(uint64(1000000)),
+ CLVMCost(uint64(11000000000)),
+ )
+ mempool = Mempool(mempool_info, fee_estimator)
+ for i in items:
+ mempool.add_to_pool(i)
+
+ ordered_items = list(mempool.items_by_feerate())
+
+ assert len(ordered_items) == len(expected)
+
+ last_fpc: Optional[float] = None
+ for mi, expected_coin in zip(ordered_items, expected):
+ assert len(mi.spend_bundle.coin_spends) == 1
+ assert mi.spend_bundle.coin_spends[0].coin == expected_coin
+ assert last_fpc is None or last_fpc >= mi.fee_per_cost
+ last_fpc = mi.fee_per_cost
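A quick worked check of the first parametrized case above, showing why fee per cost has to use float division:

# 110/50 = 2.2, 105/50 = 2.1, 100/50 = 2.0; integer division would collapse all three to 2
fees_and_costs = [(110, 50), (100, 50), (105, 50)]
fee_per_cost = [fee / cost for fee, cost in fees_and_costs]
assert fee_per_cost == [2.2, 2.0, 2.1]
# descending fee-per-cost gives insertion indices 0, 2, 1 -> [coins[0], coins[2], coins[1]]
assert sorted(range(3), key=lambda i: -fee_per_cost[i]) == [0, 2, 1]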
+
+
+def rand_hash() -> bytes32:
+ rng = random.Random()
+ ret = bytearray(32)
+ for i in range(32):
+ ret[i] = rng.getrandbits(8)
+ return bytes32(ret)
+
+
+def item_cost(cost: int, fee_rate: float) -> MempoolItem:
+ fee = cost * fee_rate
+ amount = int(fee + 100)
+ coin = Coin(rand_hash(), rand_hash(), amount)
+ return mk_item([coin], cost=cost, fee=int(cost * fee_rate))
+
+
+@pytest.mark.parametrize(
+ "items,add,expected",
+ [
+ # the max size is 100
+ # we need to evict two items
+ ([50, 25, 13, 12, 5], 10, [10, 50, 25, 13]),
+ # we don't need to evict anything
+ ([50, 25, 13], 10, [10, 50, 25, 13]),
+ # we need to evict everything
+ ([95, 5], 10, [10]),
+ # we evict a single item
+ ([75, 15, 9], 10, [10, 75, 15]),
+ ],
+)
+def test_full_mempool(items: List[int], add: int, expected: List[int]) -> None:
+ fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
+
+ mempool_info = MempoolInfo(
+ CLVMCost(uint64(100)),
+ FeeRate(uint64(1000000)),
+ CLVMCost(uint64(100)),
+ )
+ mempool = Mempool(mempool_info, fee_estimator)
+ fee_rate: float = 3.0
+ for i in items:
+ mempool.add_to_pool(item_cost(i, fee_rate))
+ fee_rate -= 0.1
+
+ # now, add the item we're testing
+ mempool.add_to_pool(item_cost(add, 3.1))
+
+ ordered_items = list(mempool.items_by_feerate())
+
+ assert len(ordered_items) == len(expected)
+
+ for mi, expected_cost in zip(ordered_items, expected):
+ assert mi.cost == expected_cost
+
+
+@pytest.mark.parametrize("height", [True, False])
+@pytest.mark.parametrize(
+ "items,expected,increase_fee",
+ [
+ # the max size is 100
+ # the max block size is 50
+ # which is also the max size for expiring transactions
+ # the increasing fee will order the transactions in the reverse
+ # insertion order
+ ([10, 11, 12, 13, 14], [14, 13, 12, 11], True),
+ # decreasing fee rate will make the last one fail to be inserted
+ ([10, 11, 12, 13, 14], [10, 11, 12, 13], False),
+ # the last is big enough to evict all previous ones
+ ([10, 11, 12, 13, 50], [50], True),
+ # the last one will not evict any earlier ones, because the fee rate is
+ # lower
+ ([10, 11, 12, 13, 50], [10, 11, 12, 13], False),
+ ],
+)
+def test_limit_expiring_transactions(height: bool, items: List[int], expected: List[int], increase_fee: bool) -> None:
+ fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
+
+ mempool_info = MempoolInfo(
+ CLVMCost(uint64(100)),
+ FeeRate(uint64(1000000)),
+ CLVMCost(uint64(50)),
+ )
+ mempool = Mempool(mempool_info, fee_estimator)
+ mempool.new_tx_block(uint32(10), uint64(100000))
+
+ # fill the mempool with regular transactions (without expiration)
+ fee_rate: float = 3.0
+ for i in range(1, 20):
+ mempool.add_to_pool(item_cost(i, fee_rate))
+ fee_rate -= 0.1
+
+ # now add the expiring transactions from the test case
+ fee_rate = 2.7
+ for cost in items:
+ fee = cost * fee_rate
+ amount = int(fee + 100)
+ coin = Coin(rand_hash(), rand_hash(), amount)
+ if height:
+ ret = mempool.add_to_pool(mk_item([coin], cost=cost, fee=int(cost * fee_rate), assert_before_height=15))
+ else:
+ ret = mempool.add_to_pool(mk_item([coin], cost=cost, fee=int(cost * fee_rate), assert_before_seconds=10400))
+ if increase_fee:
+ fee_rate += 0.1
+ assert ret is None
+ else:
+ fee_rate -= 0.1
+
+ ordered_costs = [
+ item.cost
+ for item in mempool.items_by_feerate()
+ if item.assert_before_height is not None or item.assert_before_seconds is not None
+ ]
+
+ assert ordered_costs == expected
+
+ print("")
+ for item in mempool.items_by_feerate():
+ if item.assert_before_seconds is not None or item.assert_before_height is not None:
+ ttl = "yes"
+ else:
+ ttl = "No"
+ print(f"- cost: {item.cost} TTL: {ttl}")
+
+ assert mempool.total_mempool_cost() > 90
+
+
+@pytest.mark.parametrize(
+ "items,coin_ids,expected",
+ [
+ # None of these spend those coins
+ (
+ [mk_item(coins[0:1]), mk_item(coins[1:2]), mk_item(coins[2:3])],
+ [coins[3].name(), coins[4].name()],
+ [],
+ ),
+ # One of these spends one of the coins
+ (
+ [mk_item(coins[0:1]), mk_item(coins[1:2]), mk_item(coins[2:3])],
+ [coins[1].name(), coins[3].name()],
+ [mk_item(coins[1:2])],
+ ),
+ # One of these spends one another spends two
+ (
+ [mk_item(coins[0:1]), mk_item(coins[1:3]), mk_item(coins[2:4]), mk_item(coins[3:4])],
+ [coins[2].name(), coins[3].name()],
+ [mk_item(coins[1:3]), mk_item(coins[2:4]), mk_item(coins[3:4])],
+ ),
+ ],
+)
+def test_get_items_by_coin_ids(items: List[MempoolItem], coin_ids: List[bytes32], expected: List[MempoolItem]) -> None:
+ fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
+ mempool_info = MempoolInfo(
+ CLVMCost(uint64(11000000000 * 3)),
+ FeeRate(uint64(1000000)),
+ CLVMCost(uint64(11000000000)),
+ )
+ mempool = Mempool(mempool_info, fee_estimator)
+ for i in items:
+ mempool.add_to_pool(i)
+ result = mempool.get_items_by_coin_ids(coin_ids)
+ assert set(result) == set(expected)
+
+
+def test_aggregating_on_a_solution_then_a_more_cost_saving_one_appears() -> None:
+ def always(_: bytes32) -> bool:
+ return True
+
+ def make_test_spendbundle(coin: Coin, *, fee: int = 0, with_higher_cost: bool = False) -> SpendBundle:
+ conditions = []
+ actual_fee = fee
+ if with_higher_cost:
+ conditions.extend([[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, i] for i in range(3)])
+ actual_fee += 3
+ conditions.append([ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, coin.amount - actual_fee])
+ sb = spend_bundle_from_conditions(conditions, coin)
+ return sb
+
+ def agg_and_add_sb_returning_cost_info(mempool: Mempool, spend_bundles: List[SpendBundle]) -> uint64:
+ sb = SpendBundle.aggregate(spend_bundles)
+ mi = mempool_item_from_spendbundle(sb)
+ mempool.add_to_pool(mi)
+ saved_cost = run_for_cost(
+ sb.coin_spends[0].puzzle_reveal, sb.coin_spends[0].solution, len(mi.additions), mi.cost
+ )
+ return saved_cost
+
+ fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
+ mempool_info = MempoolInfo(
+ CLVMCost(uint64(11000000000 * 3)),
+ FeeRate(uint64(1000000)),
+ CLVMCost(uint64(11000000000)),
+ )
+ mempool = Mempool(mempool_info, fee_estimator)
+ coins = [
+ Coin(IDENTITY_PUZZLE_HASH, IDENTITY_PUZZLE_HASH, uint64(amount)) for amount in range(2000000000, 2000000010)
+ ]
+ # Create a ~10 FPC item that spends the eligible coin[0]
+ sb_A = make_test_spendbundle(coins[0])
+ highest_fee = 58282830
+ sb_high_rate = make_test_spendbundle(coins[1], fee=highest_fee)
+ agg_and_add_sb_returning_cost_info(mempool, [sb_A, sb_high_rate])
+ # Create a ~2 FPC item that spends the eligible coin using the same solution A
+ sb_low_rate = make_test_spendbundle(coins[2], fee=highest_fee // 5)
+ saved_cost_on_solution_A = agg_and_add_sb_returning_cost_info(mempool, [sb_A, sb_low_rate])
+ result = mempool.create_bundle_from_mempool_items(always)
+ assert result is not None
+ agg, _ = result
+ # Make sure both items would be processed
+ assert [c.coin for c in agg.coin_spends] == [coins[0], coins[1], coins[2]]
+ # Now let's add 3 x ~3 FPC items that spend the eligible coin differently
+ # (solution B). It creates a higher (saved) cost than solution A
+ sb_B = make_test_spendbundle(coins[0], with_higher_cost=True)
+ for i in range(3, 6):
+        # We're picking this fee to get ~3 FPC, so this item gets picked after sb_A1
+ # (which has ~10 FPC) but before sb_A2 (which has ~2 FPC)
+ sb_mid_rate = make_test_spendbundle(coins[i], fee=38004852 - i)
+ saved_cost_on_solution_B = agg_and_add_sb_returning_cost_info(mempool, [sb_B, sb_mid_rate])
+ # We'd save more cost if we went with solution B instead of A
+ assert saved_cost_on_solution_B > saved_cost_on_solution_A
+ # If we process everything now, the 3 x ~3 FPC items get skipped because
+ # sb_A1 gets picked before them (~10 FPC), so from then on only sb_A2 (~2 FPC)
+ # would get picked
+ result = mempool.create_bundle_from_mempool_items(always)
+ assert result is not None
+ agg, _ = result
+ # The 3 items got skipped here
+ # We ran with solution A and missed bigger savings on solution B
+ assert mempool.size() == 5
+ assert [c.coin for c in agg.coin_spends] == [coins[0], coins[1], coins[2]]
+
+
+def test_get_puzzle_and_solution_for_coin_failure():
+ with pytest.raises(
+ ValueError, match=f"Failed to get puzzle and solution for coin {TEST_COIN}, error: failed to fill whole buffer"
+ ):
+ get_puzzle_and_solution_for_coin(BlockGenerator(SerializedProgram(), [], []), TEST_COIN)
diff --git a/tests/core/mempool/test_mempool_manager.py b/tests/core/mempool/test_mempool_manager.py
--- a/tests/core/mempool/test_mempool_manager.py
+++ b/tests/core/mempool/test_mempool_manager.py
@@ -1,16 +1,18 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple
+from typing import Any, Awaitable, Callable, Dict, List, Optional, Set, Tuple
import pytest
from blspy import G1Element, G2Element
+from chia_rs import ELIGIBLE_FOR_DEDUP
from chiabip158 import PyBIP158
from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
from chia.consensus.default_constants import DEFAULT_CONSTANTS
-from chia.full_node.mempool_check_conditions import mempool_check_time_locks
+from chia.full_node.bundle_tools import simple_solution_generator
+from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, mempool_check_time_locks
from chia.full_node.mempool_manager import (
MEMPOOL_MIN_FEE_INCREASE,
MempoolManager,
@@ -20,21 +22,33 @@
optional_max,
optional_min,
)
+from chia.protocols import wallet_protocol
+from chia.protocols.protocol_message_types import ProtocolMessageTypes
+from chia.simulator.full_node_simulator import FullNodeSimulator
+from chia.simulator.setup_nodes import SimulatorsAndWallets
+from chia.simulator.simulator_protocol import FarmNewBlockProtocol
+from chia.types.announcement import Announcement
from chia.types.blockchain_format.coin import Coin
-from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.program import INFINITE_COST, Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_record import CoinRecord
from chia.types.coin_spend import CoinSpend
from chia.types.condition_opcodes import ConditionOpcode
+from chia.types.eligible_coin_spends import DedupCoinSpend, EligibleCoinSpends, run_for_cost
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
-from chia.types.mempool_item import MempoolItem
+from chia.types.mempool_item import BundleCoinSpend, MempoolItem
+from chia.types.peer_info import PeerInfo
from chia.types.spend_bundle import SpendBundle
from chia.types.spend_bundle_conditions import Spend, SpendBundleConditions
from chia.util.errors import Err, ValidationError
-from chia.util.ints import uint32, uint64
+from chia.util.ints import uint16, uint32, uint64
+from chia.wallet.payment import Payment
+from chia.wallet.wallet import Wallet
+from chia.wallet.wallet_coin_record import WalletCoinRecord
+from chia.wallet.wallet_node import WalletNode
-IDENTITY_PUZZLE = Program.to(1)
+IDENTITY_PUZZLE = SerializedProgram.from_program(Program.to(1))
IDENTITY_PUZZLE_HASH = IDENTITY_PUZZLE.get_tree_hash()
TEST_TIMESTAMP = uint64(10040)
@@ -138,11 +152,12 @@ def make_test_conds(
before_seconds_relative: Optional[int] = None,
before_seconds_absolute: Optional[int] = None,
cost: int = 0,
+ spend_ids: List[bytes32] = [TEST_COIN_ID],
) -> SpendBundleConditions:
return SpendBundleConditions(
[
Spend(
- TEST_COIN.name(),
+ spend_id,
IDENTITY_PUZZLE_HASH,
None if height_relative is None else uint32(height_relative),
None if seconds_relative is None else uint64(seconds_relative),
@@ -154,6 +169,7 @@ def make_test_conds(
[],
0,
)
+ for spend_id in spend_ids
],
0,
uint32(height_absolute),
@@ -323,6 +339,46 @@ async def generate_and_add_spendbundle(
return (sb, sb_name, result)
+def make_bundle_spends_map_and_fee(
+ spend_bundle: SpendBundle, npc_result: NPCResult
+) -> Tuple[Dict[bytes32, BundleCoinSpend], uint64]:
+ bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = {}
+ eligibility_and_additions: Dict[bytes32, Tuple[bool, List[Coin]]] = {}
+ removals_amount = 0
+ additions_amount = 0
+ assert npc_result.conds is not None
+ for spend in npc_result.conds.spends:
+ coin_id = bytes32(spend.coin_id)
+ spend_additions = []
+ for puzzle_hash, amount, _ in spend.create_coin:
+ spend_additions.append(Coin(coin_id, puzzle_hash, amount))
+ additions_amount += amount
+ eligibility_and_additions[coin_id] = (bool(spend.flags & ELIGIBLE_FOR_DEDUP), spend_additions)
+ for coin_spend in spend_bundle.coin_spends:
+ coin_id = coin_spend.coin.name()
+ removals_amount += coin_spend.coin.amount
+ eligible_for_dedup, spend_additions = eligibility_and_additions.get(coin_id, (False, []))
+ bundle_coin_spends[coin_id] = BundleCoinSpend(coin_spend, eligible_for_dedup, spend_additions)
+ fee = uint64(removals_amount - additions_amount)
+ return bundle_coin_spends, fee
+
+
+def mempool_item_from_spendbundle(spend_bundle: SpendBundle) -> MempoolItem:
+ generator = simple_solution_generator(spend_bundle)
+ npc_result = get_name_puzzle_conditions(
+ generator=generator, max_cost=INFINITE_COST, mempool_mode=True, height=uint32(0)
+ )
+ bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result)
+ return MempoolItem(
+ spend_bundle=spend_bundle,
+ fee=fee,
+ npc_result=npc_result,
+ spend_bundle_name=spend_bundle.name(),
+ height_added_to_mempool=TEST_HEIGHT,
+ bundle_coin_spends=bundle_coin_spends,
+ )
+
+
@pytest.mark.asyncio
async def test_empty_spend_bundle() -> None:
mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_record)
@@ -619,11 +675,11 @@ def mk_item(
# can_replace()
spends = [CoinSpend(c, SerializedProgram(), SerializedProgram()) for c in coins]
spend_bundle = SpendBundle(spends, G2Element())
- npc_results = NPCResult(None, make_test_conds(cost=cost), uint64(cost))
+ npc_result = NPCResult(None, make_test_conds(cost=cost, spend_ids=[c.name() for c in coins]), uint64(cost))
return MempoolItem(
spend_bundle,
uint64(fee),
- npc_results,
+ npc_result,
spend_bundle.name(),
uint32(0),
None if assert_height is None else uint32(assert_height),
@@ -775,9 +831,8 @@ def make_test_coins() -> List[Coin]:
],
)
def test_can_replace(existing_items: List[MempoolItem], new_item: MempoolItem, expected: bool) -> None:
-
removals = set(c.name() for c in new_item.spend_bundle.removals())
- assert can_replace(set(existing_items), removals, new_item) == expected
+ assert can_replace(existing_items, removals, new_item) == expected
@pytest.mark.asyncio
@@ -827,7 +882,6 @@ async def test_get_items_not_in_filter() -> None:
@pytest.mark.asyncio
async def test_total_mempool_fees() -> None:
-
coin_records: Dict[bytes32, CoinRecord] = {}
async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
@@ -919,33 +973,35 @@ async def make_and_send_big_cost_sb(coin: Coin) -> None:
assert mempool_manager.peak is not None
result = mempool_manager.create_bundle_from_mempool(mempool_manager.peak.header_hash)
assert result is not None
- agg, additions, removals = result
+ agg, additions = result
# The second spend bundle has a higher FPC so it should get picked first
assert agg == sb2
# The first spend bundle hits the maximum block clvm cost and gets skipped
assert additions == [Coin(coins[1].name(), IDENTITY_PUZZLE_HASH, coins[1].amount - 2)]
- assert removals == [coins[1]]
+ assert agg.removals() == [coins[1]]
@pytest.mark.parametrize(
- "opcode,arg,expect_eviction",
+ "opcode,arg,expect_eviction, expect_limit",
[
# current height: 10 current_time: 10000
# we step the chain forward 1 block and 19 seconds
- (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10001, True),
- (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10019, True),
- (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10020, False),
- (co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 11, True),
- (co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 12, False),
+ (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10001, True, None),
+ (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10019, True, None),
+ (co.ASSERT_BEFORE_SECONDS_ABSOLUTE, 10020, False, 10020),
+ (co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 11, True, None),
+ (co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, 12, False, 12),
# the coin was created at height: 5 timestamp: 9900
- (co.ASSERT_BEFORE_HEIGHT_RELATIVE, 6, True),
- (co.ASSERT_BEFORE_HEIGHT_RELATIVE, 7, False),
- (co.ASSERT_BEFORE_SECONDS_RELATIVE, 119, True),
- (co.ASSERT_BEFORE_SECONDS_RELATIVE, 120, False),
+ (co.ASSERT_BEFORE_HEIGHT_RELATIVE, 6, True, None),
+ (co.ASSERT_BEFORE_HEIGHT_RELATIVE, 7, False, 5 + 7),
+ (co.ASSERT_BEFORE_SECONDS_RELATIVE, 119, True, None),
+ (co.ASSERT_BEFORE_SECONDS_RELATIVE, 120, False, 9900 + 120),
],
)
@pytest.mark.asyncio
-async def test_assert_before_expiration(opcode: ConditionOpcode, arg: int, expect_eviction: bool) -> None:
+async def test_assert_before_expiration(
+ opcode: ConditionOpcode, arg: int, expect_eviction: bool, expect_limit: Optional[int]
+) -> None:
async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
return {TEST_COIN.name(): CoinRecord(TEST_COIN, uint32(5), uint32(0), False, uint64(9900))}.get(coin_id)
@@ -973,13 +1029,22 @@ async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
still_in_pool = mempool_manager.get_spendbundle(bundle_name) == bundle
assert still_in_pool != expect_eviction
-
-
-def make_test_spendbundle(coin: Coin, *, fee: int = 0) -> SpendBundle:
- conditions = [
- [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, uint64(coin.amount - fee)],
- [ConditionOpcode.AGG_SIG_UNSAFE, G1Element(), IDENTITY_PUZZLE_HASH],
- ]
+ if still_in_pool:
+ assert expect_limit is not None
+ item = mempool_manager.get_mempool_item(bundle_name)
+ assert item is not None
+ if opcode in [co.ASSERT_BEFORE_SECONDS_ABSOLUTE, co.ASSERT_BEFORE_SECONDS_RELATIVE]:
+ assert item.assert_before_seconds == expect_limit
+ elif opcode in [co.ASSERT_BEFORE_HEIGHT_ABSOLUTE, co.ASSERT_BEFORE_HEIGHT_RELATIVE]:
+ assert item.assert_before_height == expect_limit
+ else:
+ assert False
+
+
+def make_test_spendbundle(coin: Coin, *, fee: int = 0, eligible_spend: bool = False) -> SpendBundle:
+ conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, uint64(coin.amount - fee)]]
+ if not eligible_spend:
+ conditions.append([ConditionOpcode.AGG_SIG_UNSAFE, G1Element(), IDENTITY_PUZZLE_HASH])
return spend_bundle_from_conditions(conditions, coin)
@@ -1103,3 +1168,395 @@ async def test_sufficient_total_fpc_increase() -> None:
assert_sb_in_pool(mempool_manager, sb1234)
assert_sb_not_in_pool(mempool_manager, sb12)
assert_sb_not_in_pool(mempool_manager, sb3)
+
+
+@pytest.mark.asyncio
+async def test_replace_with_extra_eligible_coin() -> None:
+ mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
+ sb1234 = SpendBundle.aggregate([make_test_spendbundle(coins[i]) for i in range(4)])
+ await send_spendbundle(mempool_manager, sb1234)
+ assert_sb_in_pool(mempool_manager, sb1234)
+    # Replace sb1234 with sb1234_2, which additionally spends an eligible coin
+ eligible_sb = make_test_spendbundle(coins[4], fee=MEMPOOL_MIN_FEE_INCREASE, eligible_spend=True)
+ sb1234_2 = SpendBundle.aggregate([sb1234, eligible_sb])
+ await send_spendbundle(mempool_manager, sb1234_2)
+ assert_sb_not_in_pool(mempool_manager, sb1234)
+ assert_sb_in_pool(mempool_manager, sb1234_2)
+
+
+@pytest.mark.asyncio
+async def test_replacing_one_with_an_eligible_coin() -> None:
+ mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000010)))
+ sb123 = SpendBundle.aggregate([make_test_spendbundle(coins[i]) for i in range(3)])
+ eligible_sb = make_test_spendbundle(coins[3], eligible_spend=True)
+ sb123e = SpendBundle.aggregate([sb123, eligible_sb])
+ await send_spendbundle(mempool_manager, sb123e)
+ assert_sb_in_pool(mempool_manager, sb123e)
+ # Replace sb123e with sb123e4
+ sb4 = make_test_spendbundle(coins[4], fee=MEMPOOL_MIN_FEE_INCREASE)
+ sb123e4 = SpendBundle.aggregate([sb123e, sb4])
+ await send_spendbundle(mempool_manager, sb123e4)
+ assert_sb_not_in_pool(mempool_manager, sb123e)
+ assert_sb_in_pool(mempool_manager, sb123e4)
+
+
+@pytest.mark.parametrize("amount", [0, 1])
+def test_run_for_cost(amount: int) -> None:
+ conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, amount]]
+ solution = Program.to(conditions)
+ cost = run_for_cost(IDENTITY_PUZZLE, solution, additions_count=1, max_cost=uint64(10000000))
+ assert cost == uint64(1800044)
+
+
+def test_run_for_cost_max_cost() -> None:
+ conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1]]
+ solution = Program.to(conditions)
+ with pytest.raises(ValueError, match="('cost exceeded', '2b')"):
+ run_for_cost(IDENTITY_PUZZLE, solution, additions_count=1, max_cost=uint64(43))
+
+
+def test_dedup_info_nothing_to_do() -> None:
+ # No eligible coins, nothing to deduplicate, item gets considered normally
+ conditions = [
+ [ConditionOpcode.AGG_SIG_UNSAFE, G1Element(), IDENTITY_PUZZLE_HASH],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
+ ]
+ sb = spend_bundle_from_conditions(conditions, TEST_COIN)
+ mempool_item = mempool_item_from_spendbundle(sb)
+ eligible_coin_spends = EligibleCoinSpends()
+ unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.cost
+ )
+ assert unique_coin_spends == sb.coin_spends
+ assert cost_saving == 0
+ assert unique_additions == [Coin(TEST_COIN_ID, IDENTITY_PUZZLE_HASH, 1)]
+ assert eligible_coin_spends == EligibleCoinSpends()
+
+
+def test_dedup_info_eligible_1st_time() -> None:
+ # Eligible coin encountered for the first time
+ conditions = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2],
+ ]
+ sb = spend_bundle_from_conditions(conditions, TEST_COIN)
+ mempool_item = mempool_item_from_spendbundle(sb)
+ eligible_coin_spends = EligibleCoinSpends()
+ solution = SerializedProgram.from_program(Program.to(conditions))
+ unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.cost
+ )
+ assert unique_coin_spends == sb.coin_spends
+ assert cost_saving == 0
+ assert set(unique_additions) == {
+ Coin(TEST_COIN_ID, IDENTITY_PUZZLE_HASH, 1),
+ Coin(TEST_COIN_ID, IDENTITY_PUZZLE_HASH, 2),
+ }
+ assert eligible_coin_spends == EligibleCoinSpends({TEST_COIN_ID: DedupCoinSpend(solution=solution, cost=None)})
+
+
+def test_dedup_info_eligible_but_different_solution() -> None:
+ # Eligible coin but different solution from the one we encountered
+ initial_conditions = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2],
+ ]
+ initial_solution = SerializedProgram.from_program(Program.to(initial_conditions))
+ eligible_coin_spends = EligibleCoinSpends({TEST_COIN_ID: DedupCoinSpend(solution=initial_solution, cost=None)})
+ conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2]]
+ sb = spend_bundle_from_conditions(conditions, TEST_COIN)
+ mempool_item = mempool_item_from_spendbundle(sb)
+ with pytest.raises(ValueError, match="Solution is different from what we're deduplicating on"):
+ eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.cost
+ )
+
+
+def test_dedup_info_eligible_2nd_time_and_another_1st_time() -> None:
+ # Eligible coin encountered a second time, and another for the first time
+ initial_conditions = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2],
+ ]
+ initial_solution = SerializedProgram.from_program(Program.to(initial_conditions))
+ eligible_coin_spends = EligibleCoinSpends({TEST_COIN_ID: DedupCoinSpend(solution=initial_solution, cost=None)})
+ sb1 = spend_bundle_from_conditions(initial_conditions, TEST_COIN)
+ second_conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 3]]
+ second_solution = SerializedProgram.from_program(Program.to(second_conditions))
+ sb2 = spend_bundle_from_conditions(second_conditions, TEST_COIN2)
+ sb = SpendBundle.aggregate([sb1, sb2])
+ mempool_item = mempool_item_from_spendbundle(sb)
+ unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.cost
+ )
+ # Only the eligible one that we encountered more than once gets deduplicated
+ assert unique_coin_spends == sb2.coin_spends
+ saved_cost = uint64(3600044)
+ assert cost_saving == saved_cost
+ assert unique_additions == [Coin(TEST_COIN_ID2, IDENTITY_PUZZLE_HASH, 3)]
+ # The coin we encountered a second time has its cost and additions properly updated
+ # The coin we encountered for the first time gets cost None and an empty set of additions
+ expected_eligible_spends = EligibleCoinSpends(
+ {
+ TEST_COIN_ID: DedupCoinSpend(solution=initial_solution, cost=saved_cost),
+ TEST_COIN_ID2: DedupCoinSpend(solution=second_solution, cost=None),
+ }
+ )
+ assert eligible_coin_spends == expected_eligible_spends
+
+
+def test_dedup_info_eligible_3rd_time_another_2nd_time_and_one_non_eligible() -> None:
+    # Eligible coin encountered a third time, another for the second time, and one non-eligible
+ initial_conditions = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 2],
+ ]
+ initial_solution = SerializedProgram.from_program(Program.to(initial_conditions))
+ second_conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 3]]
+ second_solution = SerializedProgram.from_program(Program.to(second_conditions))
+ saved_cost = uint64(3600044)
+ eligible_coin_spends = EligibleCoinSpends(
+ {
+ TEST_COIN_ID: DedupCoinSpend(solution=initial_solution, cost=saved_cost),
+ TEST_COIN_ID2: DedupCoinSpend(solution=second_solution, cost=None),
+ }
+ )
+ sb1 = spend_bundle_from_conditions(initial_conditions, TEST_COIN)
+ sb2 = spend_bundle_from_conditions(second_conditions, TEST_COIN2)
+ sb3_conditions = [
+ [ConditionOpcode.AGG_SIG_UNSAFE, G1Element(), IDENTITY_PUZZLE_HASH],
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 4],
+ ]
+ sb3 = spend_bundle_from_conditions(sb3_conditions, TEST_COIN3)
+ sb = SpendBundle.aggregate([sb1, sb2, sb3])
+ mempool_item = mempool_item_from_spendbundle(sb)
+ unique_coin_spends, cost_saving, unique_additions = eligible_coin_spends.get_deduplication_info(
+ bundle_coin_spends=mempool_item.bundle_coin_spends, max_cost=mempool_item.npc_result.cost
+ )
+ assert unique_coin_spends == sb3.coin_spends
+ saved_cost2 = uint64(1800044)
+ assert cost_saving == saved_cost + saved_cost2
+ assert unique_additions == [Coin(TEST_COIN_ID3, IDENTITY_PUZZLE_HASH, 4)]
+ expected_eligible_spends = EligibleCoinSpends(
+ {
+ TEST_COIN_ID: DedupCoinSpend(initial_solution, saved_cost),
+ TEST_COIN_ID2: DedupCoinSpend(second_solution, saved_cost2),
+ }
+ )
+ assert eligible_coin_spends == expected_eligible_spends
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("new_height_step", [1, 2, -1])
+async def test_coin_spending_different_ways_then_finding_it_spent_in_new_peak(new_height_step: int) -> None:
+    # This test makes sure that all mempool items spending a coin (in different ways)
+    # get removed properly when that coin shows up as spent in a block.
+ # NOTE: this test's parameter allows us to cover both the optimized and
+ # the reorg code paths
+ new_height = uint32(TEST_HEIGHT + new_height_step)
+ coin = Coin(IDENTITY_PUZZLE_HASH, IDENTITY_PUZZLE_HASH, 100)
+ coin_id = coin.name()
+ test_coin_records = {coin_id: CoinRecord(coin, uint32(0), uint32(0), False, uint64(0))}
+
+ async def get_coin_record(coin_id: bytes32) -> Optional[CoinRecord]:
+ return test_coin_records.get(coin_id)
+
+ mempool_manager = await instantiate_mempool_manager(get_coin_record)
+ # Create a bunch of mempool items that spend the coin in different ways
+ for i in range(3):
+ _, _, result = await generate_and_add_spendbundle(
+ mempool_manager, [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, i]], coin
+ )
+ assert result[1] == MempoolInclusionStatus.SUCCESS
+ assert len(mempool_manager.mempool.get_items_by_coin_id(coin_id)) == 3
+ assert mempool_manager.mempool.size() == 3
+ assert len(list(mempool_manager.mempool.items_by_feerate())) == 3
+    # Set up a new peak where the incoming block has spent the coin
+ # Mark this coin as spent
+ test_coin_records = {coin_id: CoinRecord(coin, uint32(0), TEST_HEIGHT, False, uint64(0))}
+ block_record = create_test_block_record(height=new_height)
+ npc_result = NPCResult(None, make_test_conds(spend_ids=[coin_id]), uint64(0))
+ await mempool_manager.new_peak(block_record, npc_result)
+    # As the coin was spent by all the mempool items we had, nothing should be left now
+ assert len(mempool_manager.mempool.get_items_by_coin_id(coin_id)) == 0
+ assert mempool_manager.mempool.size() == 0
+ assert len(list(mempool_manager.mempool.items_by_feerate())) == 0
+
+
+@pytest.mark.asyncio
+async def test_bundle_coin_spends() -> None:
+    # This tests the construction of the bundle_coin_spends map for mempool items
+ # We're creating sb123e with 4 coins, one of them being eligible
+ mempool_manager, coins = await setup_mempool_with_coins(coin_amounts=list(range(1000000000, 1000000005)))
+ sb123 = SpendBundle.aggregate([make_test_spendbundle(coins[i]) for i in range(3)])
+ eligible_sb = make_test_spendbundle(coins[3], eligible_spend=True)
+ sb123e = SpendBundle.aggregate([sb123, eligible_sb])
+ await send_spendbundle(mempool_manager, sb123e)
+ mi123e = mempool_manager.get_mempool_item(sb123e.name())
+ assert mi123e is not None
+ for i in range(3):
+ assert mi123e.bundle_coin_spends[coins[i].name()] == BundleCoinSpend(
+ coin_spend=sb123.coin_spends[i],
+ eligible_for_dedup=False,
+ additions=[Coin(coins[i].name(), IDENTITY_PUZZLE_HASH, coins[i].amount)],
+ )
+ assert mi123e.bundle_coin_spends[coins[3].name()] == BundleCoinSpend(
+ coin_spend=eligible_sb.coin_spends[0],
+ eligible_for_dedup=True,
+ additions=[Coin(coins[3].name(), IDENTITY_PUZZLE_HASH, coins[3].amount)],
+ )
+
+
+@pytest.mark.asyncio
+async def test_identical_spend_aggregation_e2e(simulator_and_wallet: SimulatorsAndWallets, self_hostname: str) -> None:
+ def get_sb_names_by_coin_id(
+ full_node_api: FullNodeSimulator,
+ spent_coin_id: bytes32,
+ ) -> Set[bytes32]:
+ return set(
+ i.spend_bundle_name
+ for i in full_node_api.full_node.mempool_manager.mempool.get_items_by_coin_id(spent_coin_id)
+ )
+
+ async def send_to_mempool(
+ full_node: FullNodeSimulator, spend_bundle: SpendBundle, *, expecting_conflict: bool = False
+ ) -> None:
+ res = await full_node.send_transaction(wallet_protocol.SendTransaction(spend_bundle))
+ assert res is not None and ProtocolMessageTypes(res.type) == ProtocolMessageTypes.transaction_ack
+ res_parsed = wallet_protocol.TransactionAck.from_bytes(res.data)
+ if expecting_conflict:
+ assert res_parsed.status == MempoolInclusionStatus.PENDING.value
+ assert res_parsed.error == "MEMPOOL_CONFLICT"
+ else:
+ assert res_parsed.status == MempoolInclusionStatus.SUCCESS.value
+
+ async def farm_a_block(full_node_api: FullNodeSimulator, wallet_node: WalletNode, ph: bytes32) -> None:
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=30)
+
+ async def make_setup_and_coins(
+ full_node_api: FullNodeSimulator, wallet_node: WalletNode
+ ) -> Tuple[Wallet, list[WalletCoinRecord], bytes32]:
+ wallet = wallet_node.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ phs = [await wallet.get_new_puzzlehash() for _ in range(3)]
+ for _ in range(2):
+ await farm_a_block(full_node_api, wallet_node, ph)
+ other_recipients = [Payment(puzzle_hash=p, amount=uint64(200), memos=[]) for p in phs[1:]]
+ tx = await wallet.generate_signed_transaction(uint64(200), phs[0], primaries=other_recipients)
+ assert tx.spend_bundle is not None
+ await send_to_mempool(full_node_api, tx.spend_bundle)
+ await farm_a_block(full_node_api, wallet_node, ph)
+ coins = list(await wallet_node.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(1))
+ # Two blocks farmed plus 3 transactions
+ assert len(coins) == 7
+ return (wallet, coins, ph)
+
+ [[full_node_api], [[wallet_node, wallet_server]], _] = simulator_and_wallet
+ server = full_node_api.full_node.server
+ await wallet_server.start_client(PeerInfo(self_hostname, uint16(server._port)), None)
+ wallet, coins, ph = await make_setup_and_coins(full_node_api, wallet_node)
+
+ # Make sure spending AB then BC would generate a conflict for the latter
+
+ tx_a = await wallet.generate_signed_transaction(uint64(30), ph, coins={coins[0].coin})
+ tx_b = await wallet.generate_signed_transaction(uint64(30), ph, coins={coins[1].coin})
+ tx_c = await wallet.generate_signed_transaction(uint64(30), ph, coins={coins[2].coin})
+ assert tx_a.spend_bundle is not None
+ assert tx_b.spend_bundle is not None
+ assert tx_c.spend_bundle is not None
+ ab_bundle = SpendBundle.aggregate([tx_a.spend_bundle, tx_b.spend_bundle])
+ await send_to_mempool(full_node_api, ab_bundle)
+ # BC should conflict here (on B)
+ bc_bundle = SpendBundle.aggregate([tx_b.spend_bundle, tx_c.spend_bundle])
+ await send_to_mempool(full_node_api, bc_bundle, expecting_conflict=True)
+ await farm_a_block(full_node_api, wallet_node, ph)
+
+ # Make sure DE and EF would aggregate on E when E is eligible for deduplication
+
+ # Create a coin with the identity puzzle hash
+ tx = await wallet.generate_signed_transaction(uint64(200), IDENTITY_PUZZLE_HASH, coins={coins[3].coin})
+ assert tx.spend_bundle is not None
+ await send_to_mempool(full_node_api, tx.spend_bundle)
+ await farm_a_block(full_node_api, wallet_node, ph)
+ # Grab the coin we created and make an eligible coin out of it
+ coins_with_identity_ph = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
+ False, IDENTITY_PUZZLE_HASH
+ )
+ sb = spend_bundle_from_conditions(
+ [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 110]], coins_with_identity_ph[0].coin
+ )
+ await send_to_mempool(full_node_api, sb)
+ await farm_a_block(full_node_api, wallet_node, ph)
+ # Grab the eligible coin to spend as E in DE and EF transactions
+ e_coin = (await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(False, IDENTITY_PUZZLE_HASH))[
+ 0
+ ].coin
+ e_coin_id = e_coin.name()
+ # Restrict spending E with an announcement to consume
+ message = b"Identical spend aggregation test"
+ e_announcement = Announcement(e_coin_id, message)
+ # Create transactions D and F that consume an announcement created by E
+ tx_d = await wallet.generate_signed_transaction(
+ uint64(100), ph, fee=uint64(0), coins={coins[4].coin}, coin_announcements_to_consume={e_announcement}
+ )
+ tx_f = await wallet.generate_signed_transaction(
+ uint64(150), ph, fee=uint64(0), coins={coins[5].coin}, coin_announcements_to_consume={e_announcement}
+ )
+ assert tx_d.spend_bundle is not None
+ assert tx_f.spend_bundle is not None
+ # Create transaction E now that spends e_coin to create another eligible
+ # coin as well as the announcement consumed by D and F
+ conditions: List[List[Any]] = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 42],
+ [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, message],
+ ]
+ sb_e = spend_bundle_from_conditions(conditions, e_coin)
+ # Send DE and EF combinations to the mempool
+ sb_de = SpendBundle.aggregate([tx_d.spend_bundle, sb_e])
+ sb_de_name = sb_de.name()
+ await send_to_mempool(full_node_api, sb_de)
+ sb_ef = SpendBundle.aggregate([sb_e, tx_f.spend_bundle])
+ sb_ef_name = sb_ef.name()
+ await send_to_mempool(full_node_api, sb_ef)
+ # Send also a transaction EG that spends E differently from DE and EF,
+ # so that it doesn't get deduplicated on E with them
+ conditions = [
+ [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, e_coin.amount - 1],
+ [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, message],
+ ]
+ sb_e2 = spend_bundle_from_conditions(conditions, e_coin)
+ g_coin = coins[6].coin
+ g_coin_id = g_coin.name()
+ tx_g = await wallet.generate_signed_transaction(
+ uint64(13), ph, coins={g_coin}, coin_announcements_to_consume={e_announcement}
+ )
+ assert tx_g.spend_bundle is not None
+ sb_e2g = SpendBundle.aggregate([sb_e2, tx_g.spend_bundle])
+ sb_e2g_name = sb_e2g.name()
+ await send_to_mempool(full_node_api, sb_e2g)
+
+ # Make sure our coin IDs to spend bundles mappings are correct
+ assert get_sb_names_by_coin_id(full_node_api, coins[4].coin.name()) == {sb_de_name}
+ assert get_sb_names_by_coin_id(full_node_api, e_coin_id) == {sb_de_name, sb_ef_name, sb_e2g_name}
+ assert get_sb_names_by_coin_id(full_node_api, coins[5].coin.name()) == {sb_ef_name}
+ assert get_sb_names_by_coin_id(full_node_api, g_coin_id) == {sb_e2g_name}
+
+ await farm_a_block(full_node_api, wallet_node, ph)
+
+ # Make sure sb_de and sb_ef coins, including the deduplicated one, are removed
+ # from the coin IDs to spend bundles mappings with the creation of a new block
+ assert get_sb_names_by_coin_id(full_node_api, coins[4].coin.name()) == set()
+ assert get_sb_names_by_coin_id(full_node_api, e_coin_id) == set()
+ assert get_sb_names_by_coin_id(full_node_api, coins[5].coin.name()) == set()
+ assert get_sb_names_by_coin_id(full_node_api, g_coin_id) == set()
+
+ # Make sure coin G remains because E2G was removed as E got spent differently (by DE and EF)
+ coins_set = await wallet_node.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(1)
+ assert g_coin in (c.coin for c in coins_set)
+ # Only the newly created eligible coin is left now
+ eligible_coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
+ False, IDENTITY_PUZZLE_HASH
+ )
+ assert len(eligible_coins) == 1
+ assert eligible_coins[0].coin.amount == 42
diff --git a/tests/core/server/test_dos.py b/tests/core/server/test_dos.py
--- a/tests/core/server/test_dos.py
+++ b/tests/core/server/test_dos.py
@@ -171,8 +171,8 @@ async def test_spam_tx(self, setup_two_nodes_fixture, self_hostname):
ws_con: WSChiaConnection = list(server_1.all_connections.values())[0]
ws_con_2: WSChiaConnection = list(server_2.all_connections.values())[0]
- ws_con.peer_host = "1.2.3.4"
- ws_con_2.peer_host = "1.2.3.4"
+ ws_con.peer_info = PeerInfo("1.2.3.4", ws_con.peer_info.port)
+ ws_con_2.peer_info = PeerInfo("1.2.3.4", ws_con_2.peer_info.port)
new_tx_message = make_msg(
ProtocolMessageTypes.new_transaction,
@@ -226,8 +226,8 @@ async def test_spam_message_non_tx(self, setup_two_nodes_fixture, self_hostname)
ws_con: WSChiaConnection = list(server_1.all_connections.values())[0]
ws_con_2: WSChiaConnection = list(server_2.all_connections.values())[0]
- ws_con.peer_host = "1.2.3.4"
- ws_con_2.peer_host = "1.2.3.4"
+ ws_con.peer_info = PeerInfo("1.2.3.4", ws_con.peer_info.port)
+ ws_con_2.peer_info = PeerInfo("1.2.3.4", ws_con_2.peer_info.port)
def is_closed():
return ws_con.closed
@@ -275,8 +275,8 @@ async def test_spam_message_too_large(self, setup_two_nodes_fixture, self_hostna
ws_con: WSChiaConnection = list(server_1.all_connections.values())[0]
ws_con_2: WSChiaConnection = list(server_2.all_connections.values())[0]
- ws_con.peer_host = "1.2.3.4"
- ws_con_2.peer_host = "1.2.3.4"
+ ws_con.peer_info = PeerInfo("1.2.3.4", ws_con.peer_info.port)
+ ws_con_2.peer_info = PeerInfo("1.2.3.4", ws_con_2.peer_info.port)
def is_closed():
return ws_con.closed
diff --git a/tests/core/server/test_server.py b/tests/core/server/test_server.py
--- a/tests/core/server/test_server.py
+++ b/tests/core/server/test_server.py
@@ -3,10 +3,14 @@
from typing import Callable, Tuple
import pytest
+from packaging.version import Version
+from chia.cmds.init_funcs import chia_full_version_str
from chia.full_node.full_node_api import FullNodeAPI
+from chia.protocols.shared_protocol import protocol_version
from chia.server.server import ChiaServer
from chia.simulator.block_tools import BlockTools
+from chia.simulator.setup_nodes import SimulatorsAndWalletsServices
from chia.types.peer_info import PeerInfo
from chia.util.ints import uint16
from tests.connection_utils import connect_and_get_peer
@@ -35,3 +39,16 @@ async def test_connection_string_conversion(
converted = method(peer)
print(converted)
assert len(converted) < 1000
+
+
+@pytest.mark.asyncio
+async def test_connection_versions(
+ self_hostname: str, one_wallet_and_one_simulator_services: SimulatorsAndWalletsServices
+) -> None:
+ [full_node_service], [wallet_service], _ = one_wallet_and_one_simulator_services
+ wallet_node = wallet_service._node
+ await wallet_node.server.start_client(PeerInfo(self_hostname, uint16(full_node_service._api.server._port)), None)
+ connection = wallet_node.server.all_connections[full_node_service._node.server.node_id]
+ assert connection.protocol_version == Version(protocol_version)
+ assert connection.version == Version(chia_full_version_str())
+ assert connection.get_version() == chia_full_version_str()
diff --git a/tests/core/ssl/test_ssl.py b/tests/core/ssl/test_ssl.py
--- a/tests/core/ssl/test_ssl.py
+++ b/tests/core/ssl/test_ssl.py
@@ -28,7 +28,6 @@ async def establish_connection(server: ChiaServer, self_hostname: str, ssl_conte
server.log,
True,
server.received_message_callback,
- self_hostname,
None,
bytes32(b"\x00" * 32),
100,
diff --git a/tests/core/test_cost_calculation.py b/tests/core/test_cost_calculation.py
--- a/tests/core/test_cost_calculation.py
+++ b/tests/core/test_cost_calculation.py
@@ -85,10 +85,9 @@ async def test_basics(self, softfork_height, bt):
coin_spend = spend_bundle.coin_spends[0]
assert coin_spend.coin.name() == npc_result.conds.spends[0].coin_id
- error, puzzle, solution = get_puzzle_and_solution_for_coin(program, coin_spend.coin)
- assert error is None
- assert puzzle == coin_spend.puzzle_reveal
- assert solution == coin_spend.solution
+ spend_info = get_puzzle_and_solution_for_coin(program, coin_spend.coin)
+ assert spend_info.puzzle == coin_spend.puzzle_reveal
+ assert spend_info.solution == coin_spend.solution
clvm_cost = 404560
byte_cost = len(bytes(program.program)) * test_constants.COST_PER_BYTE
@@ -156,8 +155,8 @@ async def test_mempool_mode(self, softfork_height, bt):
bytes32.fromhex("14947eb0e69ee8fc8279190fc2d38cb4bbb61ba28f1a270cfd643a0e8d759576"),
300,
)
- error, puzzle, solution = get_puzzle_and_solution_for_coin(generator, coin)
- assert error is None
+ spend_info = get_puzzle_and_solution_for_coin(generator, coin)
+ assert spend_info.puzzle.to_program() == puzzle
@pytest.mark.asyncio
async def test_clvm_mempool_mode(self, softfork_height):
@@ -274,6 +273,5 @@ async def test_get_puzzle_and_solution_for_coin_performance():
with assert_runtime(seconds=7, label="get_puzzle_and_solution_for_coin"):
for i in range(3):
for c in spends:
- err, puzzle, solution = get_puzzle_and_solution_for_coin(generator, c)
- assert err is None
- assert puzzle.get_tree_hash() == c.puzzle_hash
+ spend_info = get_puzzle_and_solution_for_coin(generator, c)
+ assert spend_info.puzzle.get_tree_hash() == c.puzzle_hash
diff --git a/tests/core/util/test_log_exceptions.py b/tests/core/util/test_log_exceptions.py
new file mode 100644
--- /dev/null
+++ b/tests/core/util/test_log_exceptions.py
@@ -0,0 +1,187 @@
+from __future__ import annotations
+
+import contextlib
+import dataclasses
+import logging
+import re
+from typing import Tuple, Type, Union
+
+import pytest
+
+from chia.util.log_exceptions import log_exceptions
+
+log_message = "Some message that probably, hopefully, won't accidentally come from somewhere else"
+exception_message = "A message tied to the exception"
+
+
+@pytest.fixture(name="logger")
+def logger_fixture() -> logging.Logger:
+ return logging.getLogger(__name__)
+
+
+@dataclasses.dataclass
+class ErrorCase:
+ type_to_raise: Type[BaseException]
+ type_to_catch: Union[Type[BaseException], Tuple[Type[BaseException], ...]]
+ should_match: bool
+
+
+all_level_values = [
+ logging.CRITICAL,
+ logging.ERROR,
+ logging.WARNING,
+ logging.INFO,
+ logging.DEBUG,
+]
+all_levels = {logging.getLevelName(value): value for value in all_level_values}
+
+
+def test_consumes_exception(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with log_exceptions(log=logger, consume=True):
+ raise Exception()
+
+
+def test_propagates_exception(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with pytest.raises(Exception, match=re.escape(exception_message)):
+ with log_exceptions(log=logger, consume=False):
+ raise Exception(exception_message)
+
+
+def test_propagates_exception_by_default(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with pytest.raises(Exception, match=re.escape(exception_message)):
+ with log_exceptions(log=logger):
+ raise Exception(exception_message)
+
+
+def test_passed_message_is_used(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with log_exceptions(log=logger, consume=True, message=log_message):
+ raise Exception()
+
+ assert len(caplog.records) == 1, caplog.records
+
+ [record] = caplog.records
+ assert record.msg.startswith(f"{log_message}: ")
+
+
+@pytest.mark.parametrize(
+ argnames="level",
+ argvalues=all_levels.values(),
+ ids=all_levels.keys(),
+)
+def test_specified_level_is_used(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+ level: int,
+) -> None:
+ caplog.set_level(min(all_levels.values()))
+ with log_exceptions(level=level, log=logger, consume=True):
+ raise Exception()
+
+ assert len(caplog.records) == 1, caplog.records
+
+ [record] = caplog.records
+ assert record.levelno == level
+
+
+def test_traceback_is_logged(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with log_exceptions(log=logger, consume=True, show_traceback=True):
+ raise Exception()
+
+ assert len(caplog.records) == 1, caplog.records
+
+ [record] = caplog.records
+ assert "\nTraceback " in record.msg
+
+
+def test_traceback_is_not_logged(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+) -> None:
+ with log_exceptions(log=logger, consume=True, show_traceback=False):
+ raise Exception()
+
+ assert len(caplog.records) == 1, caplog.records
+
+ [record] = caplog.records
+ assert "\nTraceback " not in record.msg
+
+
+@pytest.mark.parametrize(
+ argnames="case",
+ argvalues=[
+ # default exceptions to catch matching
+ ErrorCase(type_to_raise=Exception, type_to_catch=Exception, should_match=True),
+ ErrorCase(type_to_raise=OSError, type_to_catch=Exception, should_match=True),
+ # default exceptions to catch not matching
+ ErrorCase(type_to_raise=BaseException, type_to_catch=Exception, should_match=False),
+ # raised type the same as specified to catch
+ ErrorCase(type_to_raise=Exception, type_to_catch=Exception, should_match=True),
+ ErrorCase(type_to_raise=BaseException, type_to_catch=BaseException, should_match=True),
+ ErrorCase(type_to_raise=OSError, type_to_catch=OSError, should_match=True),
+ # raised type is subclass of to catch
+ ErrorCase(type_to_raise=AttributeError, type_to_catch=Exception, should_match=True),
+ ErrorCase(type_to_raise=KeyboardInterrupt, type_to_catch=BaseException, should_match=True),
+ ErrorCase(type_to_raise=FileExistsError, type_to_catch=OSError, should_match=True),
+ # multiple to catch matching
+ ErrorCase(type_to_raise=OSError, type_to_catch=(KeyboardInterrupt, Exception), should_match=True),
+ ErrorCase(type_to_raise=SystemExit, type_to_catch=(SystemExit, OSError), should_match=True),
+ # multiple to catch not matching
+ ErrorCase(type_to_raise=AttributeError, type_to_catch=(KeyError, TimeoutError), should_match=False),
+ ErrorCase(type_to_raise=KeyboardInterrupt, type_to_catch=(KeyError, TimeoutError), should_match=False),
+ ],
+)
+@pytest.mark.parametrize(argnames="consume", argvalues=[False, True], ids=["propagates", "consumes"])
+@pytest.mark.parametrize(argnames="show_traceback", argvalues=[False, True], ids=["no traceback", "with traceback"])
+def test_well_everything(
+ logger: logging.Logger,
+ caplog: pytest.LogCaptureFixture,
+ consume: bool,
+ case: ErrorCase,
+ show_traceback: bool,
+) -> None:
+ with contextlib.ExitStack() as exit_stack:
+ if not consume or not case.should_match:
+ # verify that the exception propagates either when it should not match or should not be consumed
+ exit_stack.enter_context(pytest.raises(case.type_to_raise, match=re.escape(exception_message)))
+
+ with log_exceptions(
+ message=log_message,
+ log=logger,
+ consume=consume,
+ show_traceback=show_traceback,
+ exceptions_to_process=case.type_to_catch,
+ ):
+ to_raise = case.type_to_raise(exception_message)
+ raise to_raise
+
+ if not case.should_match:
+ assert len(caplog.records) == 0, caplog.records
+ else:
+ # verify there is only a single log record
+ assert len(caplog.records) == 1, caplog.records
+
+ [record] = caplog.records
+ expected = f"{log_message}: {case.type_to_raise.__name__}: {exception_message}"
+
+ if show_traceback:
+ expected += "\nTraceback "
+ # verify the beginning of the log message, the traceback is not fully verified
+ assert record.msg.startswith(expected)
+ else:
+ # verify the complete log message
+ assert record.msg == expected
diff --git a/tests/farmer_harvester/test_farmer_harvester.py b/tests/farmer_harvester/test_farmer_harvester.py
--- a/tests/farmer_harvester/test_farmer_harvester.py
+++ b/tests/farmer_harvester/test_farmer_harvester.py
@@ -6,7 +6,7 @@
from chia.farmer.farmer import Farmer
from chia.simulator.time_out_assert import time_out_assert
-from chia.types.peer_info import PeerInfo
+from chia.types.peer_info import UnresolvedPeerInfo
from chia.util.keychain import generate_mnemonic
@@ -18,6 +18,7 @@ def farmer_is_started(farmer):
async def test_start_with_empty_keychain(farmer_one_harvester_not_started):
_, farmer_service, bt = farmer_one_harvester_not_started
farmer: Farmer = farmer_service._node
+ farmer_service.reconnect_retry_seconds = 1
# First remove all keys from the keychain
bt.local_keychain.delete_all_keys()
# Make sure the farmer service is not initialized yet
@@ -42,6 +43,9 @@ async def test_harvester_handshake(farmer_one_harvester_not_started):
harvester = harvester_service._node
farmer = farmer_service._node
+ farmer_service.reconnect_retry_seconds = 1
+ harvester_service.reconnect_retry_seconds = 1
+
def farmer_has_connections():
return len(farmer.server.get_connections()) > 0
@@ -60,7 +64,7 @@ async def handshake_done() -> bool:
# Start both services and wait a bit
await farmer_service.start()
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
# Handshake task should be started but the handshake should not be done
await time_out_assert(5, handshake_task_active, True)
assert not await handshake_done()
@@ -76,7 +80,7 @@ async def handshake_done() -> bool:
assert len(harvester.plot_manager.farmer_public_keys) == 0
# Re-start the harvester and make sure the handshake task gets started but the handshake still doesn't go through
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
await time_out_assert(5, handshake_task_active, True)
assert not await handshake_done()
# Stop the farmer and make sure the handshake_task doesn't block the shutdown
diff --git a/tests/fee_estimation/test_fee_estimation_integration.py b/tests/fee_estimation/test_fee_estimation_integration.py
--- a/tests/fee_estimation/test_fee_estimation_integration.py
+++ b/tests/fee_estimation/test_fee_estimation_integration.py
@@ -38,7 +38,7 @@ def make_mempoolitem() -> MempoolItem:
ph = wallet_tool.get_new_puzzlehash()
coin = Coin(ph, ph, uint64(10000))
spend_bundle = wallet_tool.generate_signed_transaction(uint64(10000), ph, coin)
- cost = uint64(5000000)
+ cost = uint64(1000000)
block_height = 1
fee = uint64(10000000)
diff --git a/tests/generator/test_compression.py b/tests/generator/test_compression.py
--- a/tests/generator/test_compression.py
+++ b/tests/generator/test_compression.py
@@ -4,8 +4,8 @@
import io
from dataclasses import dataclass
from typing import Any, List
-from unittest import TestCase
+import pytest
from clvm import SExp
from clvm.serialize import sexp_from_stream
from clvm_tools import binutils
@@ -18,7 +18,6 @@
simple_solution_generator,
spend_bundle_to_serialized_coin_spend_entry_list,
)
-from chia.full_node.generator import create_generator_args, run_generator_unsafe
from chia.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin
from chia.types.blockchain_format.program import INFINITE_COST, Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
@@ -28,18 +27,19 @@
from chia.util.ints import uint32
from chia.wallet.puzzles.load_clvm import load_clvm
from tests.core.make_block_generator import make_spend_bundle
+from tests.generator.test_rom import run_generator
-TEST_GEN_DESERIALIZE = load_clvm("test_generator_deserialize.clvm", package_or_requirement="chia.wallet.puzzles")
-DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles")
+TEST_GEN_DESERIALIZE = load_clvm("test_generator_deserialize.clsp", package_or_requirement="chia.wallet.puzzles")
+DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clsp", package_or_requirement="chia.wallet.puzzles")
-DECOMPRESS_PUZZLE = load_clvm("decompress_puzzle.clvm", package_or_requirement="chia.wallet.puzzles")
-DECOMPRESS_CSE = load_clvm("decompress_coin_spend_entry.clvm", package_or_requirement="chia.wallet.puzzles")
+DECOMPRESS_PUZZLE = load_clvm("decompress_puzzle.clsp", package_or_requirement="chia.wallet.puzzles")
+DECOMPRESS_CSE = load_clvm("decompress_coin_spend_entry.clsp", package_or_requirement="chia.wallet.puzzles")
DECOMPRESS_CSE_WITH_PREFIX = load_clvm(
- "decompress_coin_spend_entry_with_prefix.clvm", package_or_requirement="chia.wallet.puzzles"
+ "decompress_coin_spend_entry_with_prefix.clsp", package_or_requirement="chia.wallet.puzzles"
)
-DECOMPRESS_BLOCK = load_clvm("block_program_zero.clvm", package_or_requirement="chia.wallet.puzzles")
-TEST_MULTIPLE = load_clvm("test_multiple_generator_input_arguments.clvm", package_or_requirement="chia.wallet.puzzles")
+DECOMPRESS_BLOCK = load_clvm("block_program_zero.clsp", package_or_requirement="chia.wallet.puzzles")
+TEST_MULTIPLE = load_clvm("test_multiple_generator_input_arguments.clsp", package_or_requirement="chia.wallet.puzzles")
Nil = Program.from_bytes(b"\x80")
@@ -99,17 +99,21 @@ def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]:
return r
-class TestCompression(TestCase):
- def test_spend_bundle_suitable(self):
+class TestCompression:
+ def test_spend_bundle_suitable(self) -> None:
sb: SpendBundle = make_spend_bundle(1)
assert bundle_suitable_for_compression(sb)
- def test_compress_spend_bundle(self):
+ def test_compress_spend_bundle(self) -> None:
pass
- def test_multiple_input_gen_refs(self):
- start1, end1 = match_standard_transaction_at_any_index(gen1)
- start2, end2 = match_standard_transaction_at_any_index(gen2)
+ def test_multiple_input_gen_refs(self) -> None:
+ match = match_standard_transaction_at_any_index(gen1)
+ assert match is not None
+ start1, end1 = match
+ match = match_standard_transaction_at_any_index(gen2)
+ assert match is not None
+ start2, end2 = match
ca1 = CompressorArg(FAKE_BLOCK_HEIGHT1, SerializedProgram.from_bytes(gen1), start1, end1)
ca2 = CompressorArg(FAKE_BLOCK_HEIGHT2, SerializedProgram.from_bytes(gen2), start2, end2)
@@ -122,44 +126,48 @@ def test_multiple_input_gen_refs(self):
gen_args = MultipleCompressorArg([ca1, ca2], split_offset)
spend_bundle: SpendBundle = make_spend_bundle(1)
multi_gen = create_multiple_ref_generator(gen_args, spend_bundle)
- cost, result = run_generator_unsafe(multi_gen, INFINITE_COST)
+ cost, result = run_generator(multi_gen)
results.append(result)
assert result is not None
assert cost > 0
assert all(r == results[0] for r in results)
- def test_compressed_block_results(self):
+ def test_compressed_block_results(self) -> None:
sb: SpendBundle = make_spend_bundle(1)
- start, end = match_standard_transaction_at_any_index(original_generator)
+ match = match_standard_transaction_at_any_index(original_generator)
+ assert match is not None
+ start, end = match
ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
c = compressed_spend_bundle_solution(ca, sb)
s = simple_solution_generator(sb)
assert c != s
- cost_c, result_c = run_generator_unsafe(c, INFINITE_COST)
- cost_s, result_s = run_generator_unsafe(s, INFINITE_COST)
+ cost_c, result_c = run_generator(c)
+ cost_s, result_s = run_generator(s)
+ print()
print(result_c)
assert result_c is not None
assert result_s is not None
+ print(result_s)
assert result_c == result_s
- def test_get_removals_for_single_coin(self):
+ def test_get_removals_for_single_coin(self) -> None:
sb: SpendBundle = make_spend_bundle(1)
- start, end = match_standard_transaction_at_any_index(original_generator)
+ match = match_standard_transaction_at_any_index(original_generator)
+ assert match is not None
+ start, end = match
ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
c = compressed_spend_bundle_solution(ca, sb)
removal = sb.coin_spends[0].coin
- error, puzzle, solution = get_puzzle_and_solution_for_coin(c, removal)
- assert error is None
- assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
- assert bytes(solution) == bytes(sb.coin_spends[0].solution)
+ spend_info = get_puzzle_and_solution_for_coin(c, removal)
+ assert bytes(spend_info.puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
+ assert bytes(spend_info.solution) == bytes(sb.coin_spends[0].solution)
# Test non compressed generator as well
s = simple_solution_generator(sb)
- error, puzzle, solution = get_puzzle_and_solution_for_coin(s, removal)
- assert error is None
- assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
- assert bytes(solution) == bytes(sb.coin_spends[0].solution)
+ spend_info = get_puzzle_and_solution_for_coin(s, removal)
+ assert bytes(spend_info.puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
+ assert bytes(spend_info.solution) == bytes(sb.coin_spends[0].solution)
- def test_spend_byndle_coin_spend(self):
+ def test_spend_byndle_coin_spend(self) -> None:
for i in range(0, 10):
sb: SpendBundle = make_spend_bundle(i)
cs1 = SExp.to(spend_bundle_to_coin_spend_entry_list(sb)).as_bin() # pylint: disable=E1101
@@ -167,18 +175,12 @@ def test_spend_byndle_coin_spend(self):
assert cs1 == cs2
-class TestDecompression(TestCase):
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self.maxDiff = None
-
- def test_deserialization(self):
- self.maxDiff = None
+class TestDecompression:
+ def test_deserialization(self) -> None:
cost, out = DESERIALIZE_MOD.run_with_cost(INFINITE_COST, [bytes(Program.to("hello"))])
assert out == Program.to("hello")
- def test_deserialization_as_argument(self):
- self.maxDiff = None
+ def test_deserialization_as_argument(self) -> None:
cost, out = TEST_GEN_DESERIALIZE.run_with_cost(
INFINITE_COST, [DESERIALIZE_MOD, Nil, bytes(Program.to("hello"))]
)
@@ -187,7 +189,7 @@ def test_deserialization_as_argument(self):
print(out)
assert out == Program.to("hello")
- def test_decompress_puzzle(self):
+ def test_decompress_puzzle(self) -> None:
cost, out = DECOMPRESS_PUZZLE.run_with_cost(
INFINITE_COST, [DESERIALIZE_MOD, b"\xff", bytes(Program.to("pubkey")), b"\x80"]
)
@@ -202,11 +204,11 @@ def test_decompress_puzzle(self):
# print()
# print(out)
- def test_decompress_cse(self):
+ def test_decompress_cse(self) -> None:
"""Decompress a single CSE / CoinSpendEntry"""
- cse0 = binutils.assemble(
+ cse0 = binutils.assemble( # type: ignore[no-untyped-call]
"((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
- ) # noqa
+ )
cost, out = DECOMPRESS_CSE.run_with_cost(
INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, b"\xff", b"\x80", cse0]
)
@@ -214,10 +216,10 @@ def test_decompress_cse(self):
print()
print(out)
- def test_decompress_cse_with_prefix(self):
- cse0 = binutils.assemble(
+ def test_decompress_cse_with_prefix(self) -> None:
+ cse0 = binutils.assemble( # type: ignore[no-untyped-call]
"((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
- ) # noqa
+ )
start = 2 + 44
end = start + 238
@@ -230,13 +232,12 @@ def test_decompress_cse_with_prefix(self):
print()
print(out)
- def test_block_program_zero(self):
+ def test_block_program_zero(self) -> None:
"Decompress a list of CSEs"
- self.maxDiff = None
- cse1 = binutils.assemble(
+ cse1 = binutils.assemble( # type: ignore[no-untyped-call]
"(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
- ) # noqa
- cse2 = binutils.assemble(
+ )
+ cse2 = binutils.assemble( # type: ignore[no-untyped-call]
"""
(
((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
@@ -250,7 +251,7 @@ def test_block_program_zero(self):
)
"""
- ) # noqa
+ )
start = 2 + 44
end = start + 238
@@ -273,12 +274,11 @@ def test_block_program_zero(self):
print()
print(out)
- def test_block_program_zero_with_curry(self):
- self.maxDiff = None
- cse1 = binutils.assemble(
+ def test_block_program_zero_with_curry(self) -> None:
+ cse1 = binutils.assemble( # type: ignore[no-untyped-call]
"(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
- ) # noqa
- cse2 = binutils.assemble(
+ )
+ cse2 = binutils.assemble( # type: ignore[no-untyped-call]
"""
(
((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
@@ -292,7 +292,7 @@ def test_block_program_zero_with_curry(self):
)
"""
- ) # noqa
+ )
start = 2 + 44
end = start + 238
@@ -309,7 +309,7 @@ def test_block_program_zero_with_curry(self):
p_with_cses = DECOMPRESS_BLOCK.curry(
DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, start, Program.to(end), cse2, DESERIALIZE_MOD
)
- generator_args = create_generator_args([SerializedProgram.from_bytes(original_generator)])
+ generator_args = Program.to([[original_generator]])
cost, out = p_with_cses.run_with_cost(INFINITE_COST, generator_args)
print()
diff --git a/tests/generator/test_generator_types.py b/tests/generator/test_generator_types.py
--- a/tests/generator/test_generator_types.py
+++ b/tests/generator/test_generator_types.py
@@ -3,7 +3,7 @@
from typing import Dict
from unittest import TestCase
-from chia.full_node.generator import create_block_generator, create_generator_args
+from chia.full_node.generator import create_block_generator
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.generator_types import GeneratorBlockCacheInterface
@@ -37,18 +37,16 @@ def get_generator_for_block_height(self, index: uint32) -> SerializedProgram:
class TestGeneratorTypes(TestCase):
- def test_make_generator(self):
+ def test_make_generator(self) -> None:
block_dict = BlockDict({uint32(1): gen1})
gen = create_block_generator(gen2, [uint32(1)], block_dict)
print(gen)
- def test_make_generator_args(self):
- generator_ref_list = [gen1]
- gen_args = create_generator_args(generator_ref_list)
- gen_args_as_program = Program.from_bytes(bytes(gen_args))
+ def test_make_generator_args(self) -> None:
+ gen_args = Program.to([[bytes(gen1)]])
# First Argument to the block generator is the first template generator
- arg2 = gen_args_as_program.first().first()
+ arg2 = gen_args.first().first()
print(arg2)
assert arg2 == bytes(gen1)
diff --git a/tests/generator/test_rom.py b/tests/generator/test_rom.py
--- a/tests/generator/test_rom.py
+++ b/tests/generator/test_rom.py
@@ -1,12 +1,11 @@
from __future__ import annotations
-from typing import List
+from typing import List, Tuple
from clvm_tools import binutils
from clvm_tools.clvmc import compile_clvm_text
from chia.consensus.condition_costs import ConditionCost
-from chia.full_node.generator import run_generator_unsafe
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.serialized_program import SerializedProgram
@@ -15,12 +14,13 @@
from chia.types.spend_bundle_conditions import ELIGIBLE_FOR_DEDUP, Spend
from chia.util.ints import uint32
from chia.wallet.puzzles.load_clvm import load_clvm
+from chia.wallet.puzzles.rom_bootstrap_generator import GENERATOR_MOD
MAX_COST = int(1e15)
COST_PER_BYTE = int(12000)
-DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clvm", package_or_requirement="chia.wallet.puzzles")
+DESERIALIZE_MOD = load_clvm("chialisp_deserialisation.clsp", package_or_requirement="chia.wallet.puzzles")
GENERATOR_CODE = """
@@ -40,34 +40,22 @@
"""
-COMPILED_GENERATOR_CODE = bytes.fromhex(
- "ff02ffff01ff04ffff02ff04ffff04ff02ffff04ff05ffff04ff0bff8080808080ffff02"
- "ff06ffff04ff02ffff04ff05ffff04ff0bff808080808080ffff04ffff01ffff02ff05ff"
- "1380ff02ff05ff2b80ff018080"
-)
-
-COMPILED_GENERATOR_CODE = bytes(Program.to(compile_clvm_text(GENERATOR_CODE, [])))
-
-FIRST_GENERATOR = Program.to(
- binutils.assemble('((parent_id (c 1 (q "puzzle blob")) 50000 "solution is here" extra data for coin))')
-).as_bin()
-
-SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin()
-
+COMPILED_GENERATOR_CODE = bytes(Program.to(compile_clvm_text(GENERATOR_CODE, []))) # type: ignore[no-untyped-call]
FIRST_GENERATOR = Program.to(
- binutils.assemble(
+ binutils.assemble( # type: ignore[no-untyped-call]
"""
((0x0000000000000000000000000000000000000000000000000000000000000000 1 50000
- ((51 0x0000000000000000000000000000000000000000000000000000000000000001 500)) "extra" "data" "for" "coin" ))"""
+ ((51 0x0000000000000000000000000000000000000000000000000000000000000001 500))
+ "extra" "data" "for" "coin" ))"""
)
).as_bin()
-SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin()
+SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin() # type: ignore[no-untyped-call]
-def to_sp(sexp) -> SerializedProgram:
- return SerializedProgram.from_bytes(bytes(sexp))
+def to_sp(sexp: bytes) -> SerializedProgram:
+ return SerializedProgram.from_bytes(sexp)
def block_generator() -> BlockGenerator:
@@ -86,6 +74,12 @@ def block_generator() -> BlockGenerator:
)
+def run_generator(self: BlockGenerator) -> Tuple[int, Program]:
+ """This mode is meant for accepting possibly soft-forked transactions into the mempool"""
+ args = Program.to([[bytes(g) for g in self.generator_refs]])
+ return GENERATOR_MOD.run_with_cost(MAX_COST, self.program, args)
+
+
def as_atom_list(prg: Program) -> List[bytes]:
"""
Pretend `prg` is a list of atoms. Return the corresponding
@@ -110,7 +104,7 @@ def as_atom_list(prg: Program) -> List[bytes]:
class TestROM:
- def test_rom_inputs(self):
+ def test_rom_inputs(self) -> None:
# this test checks that the generator just works
# It's useful for debugging the generator prior to having the ROM invoke it.
@@ -120,18 +114,21 @@ def test_rom_inputs(self):
assert cost == EXPECTED_ABBREVIATED_COST
assert r.as_bin().hex() == EXPECTED_OUTPUT
- def test_get_name_puzzle_conditions(self, softfork_height):
+ def test_get_name_puzzle_conditions(self, softfork_height: int) -> None:
# this tests that extra block or coin data doesn't confuse `get_name_puzzle_conditions`
gen = block_generator()
- cost, r = run_generator_unsafe(gen, max_cost=MAX_COST)
+ cost, r = run_generator(gen)
print(r)
- npc_result = get_name_puzzle_conditions(gen, max_cost=MAX_COST, mempool_mode=False, height=softfork_height)
+ npc_result = get_name_puzzle_conditions(
+ gen, max_cost=MAX_COST, mempool_mode=False, height=uint32(softfork_height)
+ )
assert npc_result.error is None
assert npc_result.cost == EXPECTED_COST + ConditionCost.CREATE_COIN.value + (
len(bytes(gen.program)) * COST_PER_BYTE
)
+ assert npc_result.conds is not None
spend = Spend(
coin_id=bytes32.fromhex("e8538c2d14f2a7defae65c5c97f5d4fae7ee64acef7fec9d28ad847a0880fd03"),
@@ -149,20 +146,20 @@ def test_get_name_puzzle_conditions(self, softfork_height):
assert npc_result.conds.spends == [spend]
- def test_coin_extras(self):
+ def test_coin_extras(self) -> None:
# the ROM supports extra data after a coin. This test checks that it actually gets passed through
gen = block_generator()
- cost, r = run_generator_unsafe(gen, max_cost=MAX_COST)
+ cost, r = run_generator(gen)
coin_spends = r.first()
for coin_spend in coin_spends.as_iter():
extra_data = coin_spend.rest().rest().rest().rest()
assert as_atom_list(extra_data) == b"extra data for coin".split()
- def test_block_extras(self):
+ def test_block_extras(self) -> None:
# the ROM supports extra data after the coin spend list. This test checks that it actually gets passed through
gen = block_generator()
- cost, r = run_generator_unsafe(gen, max_cost=MAX_COST)
+ cost, r = run_generator(gen)
extra_block_data = r.rest()
assert as_atom_list(extra_block_data) == b"extra data for block".split()
diff --git a/tests/plot_sync/test_plot_sync.py b/tests/plot_sync/test_plot_sync.py
--- a/tests/plot_sync/test_plot_sync.py
+++ b/tests/plot_sync/test_plot_sync.py
@@ -299,6 +299,7 @@ def new_test_dir(name: str, plot_list: List[Path]) -> Directory:
file.write(bytes(100))
harvester_services, farmer_service, bt = farmer_two_harvester_not_started
+ farmer_service.reconnect_retry_seconds = 1
farmer: Farmer = farmer_service._node
await farmer_service.start()
harvesters: List[Harvester] = [
diff --git a/tests/plot_sync/test_receiver.py b/tests/plot_sync/test_receiver.py
--- a/tests/plot_sync/test_receiver.py
+++ b/tests/plot_sync/test_receiver.py
@@ -247,8 +247,8 @@ async def test_to_dict(counts_only: bool) -> None:
assert plot_sync_dict_1["last_sync_time"] is None
assert plot_sync_dict_1["connection"] == {
"node_id": receiver.connection().peer_node_id,
- "host": receiver.connection().peer_host,
- "port": receiver.connection().peer_port,
+ "host": receiver.connection().peer_info.host,
+ "port": receiver.connection().peer_info.port,
}
# We should get equal dicts
diff --git a/tests/plot_sync/util.py b/tests/plot_sync/util.py
--- a/tests/plot_sync/util.py
+++ b/tests/plot_sync/util.py
@@ -13,16 +13,15 @@
from chia.server.start_service import Service
from chia.simulator.time_out_assert import time_out_assert
from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.types.peer_info import PeerInfo
-from chia.util.ints import uint64
+from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo
+from chia.util.ints import uint16, uint64
@dataclass
class WSChiaConnectionDummy:
connection_type: NodeType
peer_node_id: bytes32
- peer_host: str = "localhost"
- peer_port: int = 0
+ peer_info: PeerInfo = PeerInfo("127.0.0.1", uint16(0))
last_sent_message: Optional[Message] = None
async def send_message(self, message: Message) -> None:
@@ -41,8 +40,9 @@ async def start_harvester_service(harvester_service: Service[Harvester], farmer_
# Set the `last_refresh_time` of the plot manager to avoid initial plot loading
harvester: Harvester = harvester_service._node
harvester.plot_manager.last_refresh_time = time.time()
+ harvester_service.reconnect_retry_seconds = 1
await harvester_service.start()
- harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
+ harvester_service.add_peer(UnresolvedPeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
harvester.plot_manager.stop_refreshing()
assert harvester.plot_sync_sender._sync_id == 0
diff --git a/tests/pools/test_pool_puzzles_lifecycle.py b/tests/pools/test_pool_puzzles_lifecycle.py
--- a/tests/pools/test_pool_puzzles_lifecycle.py
+++ b/tests/pools/test_pool_puzzles_lifecycle.py
@@ -46,8 +46,8 @@
"""
This test suite aims to test:
- chia.pools.pool_puzzles.py
- - chia.wallet.puzzles.pool_member_innerpuz.clvm
- - chia.wallet.puzzles.pool_waiting_room_innerpuz.clvm
+ - chia.wallet.puzzles.pool_member_innerpuz.clsp
+ - chia.wallet.puzzles.pool_waiting_room_innerpuz.clsp
"""
diff --git a/tests/pools/test_pool_rpc.py b/tests/pools/test_pool_rpc.py
--- a/tests/pools/test_pool_rpc.py
+++ b/tests/pools/test_pool_rpc.py
@@ -63,16 +63,15 @@ async def manage_temporary_pool_plot(
with tempfile.TemporaryDirectory() as tmpdir:
tmp_path: Path = Path(tmpdir)
bt.add_plot_directory(tmp_path)
- plot_id = await bt.new_plot(p2_singleton_puzzle_hash, tmp_path, tmp_dir=tmp_path)
- assert plot_id is not None
- await bt.refresh_plots()
+ bt_plot = await bt.new_plot(p2_singleton_puzzle_hash, tmp_path, tmp_dir=tmp_path)
+ try:
+ await bt.refresh_plots()
- plot = TemporaryPoolPlot(bt=bt, p2_singleton_puzzle_hash=p2_singleton_puzzle_hash, plot_id=plot_id)
+ plot = TemporaryPoolPlot(bt=bt, p2_singleton_puzzle_hash=p2_singleton_puzzle_hash, plot_id=bt_plot.plot_id)
- try:
yield plot
finally:
- await bt.delete_plot(plot_id)
+ await bt.delete_plot(bt_plot.plot_id)
PREFARMED_BLOCKS = 4
diff --git a/tests/pools/test_wallet_pool_store.py b/tests/pools/test_wallet_pool_store.py
--- a/tests/pools/test_wallet_pool_store.py
+++ b/tests/pools/test_wallet_pool_store.py
@@ -1,7 +1,8 @@
from __future__ import annotations
+from dataclasses import dataclass, field
from secrets import token_bytes
-from typing import Optional
+from typing import Dict, List, Optional
import pytest
from clvm_tools import binutils
@@ -11,18 +12,19 @@
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_spend import CoinSpend, compute_additions
-from chia.util.ints import uint64
+from chia.util.ints import uint32, uint64
from chia.wallet.wallet_pool_store import WalletPoolStore
from tests.util.db_connection import DBConnection
-def make_child_solution(coin_spend: CoinSpend, new_coin: Optional[Coin] = None) -> CoinSpend:
+def make_child_solution(coin_spend: Optional[CoinSpend], new_coin: Optional[Coin] = None) -> CoinSpend:
new_puzzle_hash: bytes32 = bytes32(token_bytes(32))
solution = "()"
puzzle = f"(q . ((51 0x{new_puzzle_hash.hex()} 1)))"
puzzle_prog = Program.to(binutils.assemble(puzzle))
solution_prog = Program.to(binutils.assemble(solution))
if new_coin is None:
+ assert coin_spend is not None
new_coin = compute_additions(coin_spend)[0]
sol: CoinSpend = CoinSpend(
new_coin,
@@ -32,6 +34,27 @@ def make_child_solution(coin_spend: CoinSpend, new_coin: Optional[Coin] = None)
return sol
+async def assert_db_spends(store: WalletPoolStore, wallet_id: int, spends: List[CoinSpend]) -> None:
+ db_spends = await store.get_spends_for_wallet(wallet_id)
+ assert len(db_spends) == len(spends)
+ for spend, (_, db_spend) in zip(spends, db_spends):
+ assert spend == db_spend
+
+
+@dataclass
+class DummySpends:
+ spends_per_wallet: Dict[int, List[CoinSpend]] = field(default_factory=dict)
+
+ def generate(self, wallet_id: int, count: int) -> None:
+ current = self.spends_per_wallet.setdefault(wallet_id, [])
+ for _ in range(count):
+ coin = None
+ last_spend = None if len(current) == 0 else current[-1]
+ if last_spend is None:
+ coin = Coin(token_bytes(32), token_bytes(32), uint64(12312))
+ current.append(make_child_solution(last_spend, coin))
+
+
class TestWalletPoolStore:
@pytest.mark.asyncio
async def test_store(self):
@@ -107,3 +130,23 @@ async def test_store(self):
await store.add_spend(1, solution_5, 105)
await store.rollback(99, 1)
assert await store.get_spends_for_wallet(1) == []
+
+
+@pytest.mark.asyncio
+async def test_delete_wallet() -> None:
+ dummy_spends = DummySpends()
+ for i in range(5):
+ dummy_spends.generate(i, i * 5)
+ async with DBConnection(1) as db_wrapper:
+ store = await WalletPoolStore.create(db_wrapper)
+ # Add the spends per wallet and verify them
+ for wallet_id, spends in dummy_spends.spends_per_wallet.items():
+ for i, spend in enumerate(spends):
+ await store.add_spend(wallet_id, spend, uint32(i + wallet_id))
+ await assert_db_spends(store, wallet_id, spends)
+        # Remove the wallets one after another and verify the store before and after each removal
+ for wallet_id, spends in dummy_spends.spends_per_wallet.items():
+            # Assert the spends again here to make sure the previous removals did not affect other wallet_ids
+ await assert_db_spends(store, wallet_id, spends)
+ await store.delete_wallet(wallet_id)
+ await assert_db_spends(store, wallet_id, [])
diff --git a/tests/simulation/test_start_simulator.py b/tests/simulation/test_start_simulator.py
--- a/tests/simulation/test_start_simulator.py
+++ b/tests/simulation/test_start_simulator.py
@@ -91,7 +91,7 @@ async def test_start_simulator(
# We can ignore this timeout as long as the subsequent tests pass
try:
await simulator_rpc_client.reorg_blocks(2) # fork point 2 blocks, now height is 5
- except asyncio.exceptions.TimeoutError:
+ except asyncio.TimeoutError:
pass # ignore this error and hope the reorg is going ahead
# wait up to 5 mins
diff --git a/tests/util/alert_server.py b/tests/util/alert_server.py
deleted file mode 100644
--- a/tests/util/alert_server.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from __future__ import annotations
-
-import argparse
-import asyncio
-import logging
-from pathlib import Path
-from typing import Any
-
-from aiohttp import web
-
-log = logging.getLogger(__name__)
-
-
-class AlertServer:
- shut_down: bool
- shut_down_event: asyncio.Event
- log: Any
- app: Any
- alert_file_path: Path
- port: int
-
- @staticmethod
- async def create_alert_server(alert_file_path: Path, port):
- self = AlertServer()
- self.log = log
- self.shut_down = False
- self.app = web.Application()
- self.shut_down_event = asyncio.Event()
- self.port = port
- routes = [
- web.get("/status", self.status),
- ]
-
- self.alert_file_path = alert_file_path
- self.app.add_routes(routes)
-
- return self
-
- async def status(self, request):
- file_text = self.alert_file_path.read_text()
- return web.Response(body=file_text, content_type="text/plain")
-
- async def stop(self):
- self.shut_down_event.set()
-
- async def run(self):
- runner = web.AppRunner(self.app, access_log=None)
- await runner.setup()
- site = web.TCPSite(runner, None, self.port)
- await site.start()
-
-
-async def run_and_wait(file_path, port):
- server = await AlertServer.create_alert_server(Path(file_path), port)
- await server.run()
- await server.shut_down_event.wait()
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("-file_path", type=str, dest="file_path")
- parser.add_argument("-port", type=str, dest="port")
-
- port = None
- file_path = None
-
- for key, value in vars(parser.parse_args()).items():
- if key == "port":
- port = value
- elif key == "file_path":
- file_path = value
- else:
- print(f"Invalid argument {key}")
-
- if port is None or file_path is None:
- print(
- "Missing arguments, example usage:\n\n"
- "python chia/util/alert_server.py -p 4000 -file_path /home/user/alert.txt\n"
- )
- quit()
-
- return asyncio.run(run_and_wait(file_path, port))
-
-
-if __name__ == "__main__":
- main()
diff --git a/tests/util/blockchain.py b/tests/util/blockchain.py
--- a/tests/util/blockchain.py
+++ b/tests/util/blockchain.py
@@ -4,7 +4,7 @@
import pickle
import tempfile
from pathlib import Path
-from typing import List, Optional
+from typing import List, Optional, Tuple
from chia.consensus.blockchain import Blockchain
from chia.consensus.constants import ConsensusConstants
@@ -16,7 +16,7 @@
from chia.util.default_root import DEFAULT_ROOT_PATH
-async def create_blockchain(constants: ConsensusConstants, db_version: int):
+async def create_blockchain(constants: ConsensusConstants, db_version: int) -> Tuple[Blockchain, DBWrapper2, Path]:
db_path = Path(tempfile.NamedTemporaryFile().name)
if db_path.exists():
@@ -35,14 +35,14 @@ def persistent_blocks(
db_name: str,
bt: BlockTools,
seed: bytes = b"",
- empty_sub_slots=0,
+ empty_sub_slots: int = 0,
normalized_to_identity_cc_eos: bool = False,
normalized_to_identity_icc_eos: bool = False,
normalized_to_identity_cc_sp: bool = False,
normalized_to_identity_cc_ip: bool = False,
- block_list_input: List[FullBlock] = None,
+ block_list_input: Optional[List[FullBlock]] = None,
time_per_block: Optional[float] = None,
-):
+) -> List[FullBlock]:
# try loading from disc, if not create new blocks.db file
# TODO hash fixtures.py and blocktool.py, add to path, delete if the files changed
if block_list_input is None:
@@ -100,7 +100,7 @@ def new_test_db(
normalized_to_identity_icc_eos: bool = False, # ICC_EOS
normalized_to_identity_cc_sp: bool = False, # CC_SP,
normalized_to_identity_cc_ip: bool = False, # CC_IP
-):
+) -> List[FullBlock]:
print(f"create {path} with {num_of_blocks} blocks with ")
blocks: List[FullBlock] = bt.get_consecutive_blocks(
num_of_blocks,
diff --git a/tests/util/key_tool.py b/tests/util/key_tool.py
--- a/tests/util/key_tool.py
+++ b/tests/util/key_tool.py
@@ -6,7 +6,7 @@
from chia.simulator.block_tools import test_constants
from chia.types.coin_spend import CoinSpend
-from chia.util.condition_tools import conditions_by_opcode, conditions_for_solution, pkm_pairs_for_conditions_dict
+from chia.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from tests.core.make_block_generator import GROUP_ORDER, int_to_public_key
@@ -28,11 +28,9 @@ def sign(self, public_key: bytes, message: bytes) -> G2Element:
def signature_for_solution(self, coin_spend: CoinSpend, additional_data: bytes) -> AugSchemeMPL:
signatures = []
- err, conditions, cost = conditions_for_solution(
+ conditions_dict = conditions_dict_for_solution(
coin_spend.puzzle_reveal, coin_spend.solution, test_constants.MAX_BLOCK_COST_CLVM
)
- assert conditions is not None
- conditions_dict = conditions_by_opcode(conditions)
for public_key, message in pkm_pairs_for_conditions_dict(
conditions_dict, coin_spend.coin.name(), additional_data
):
diff --git a/tests/util/misc.py b/tests/util/misc.py
--- a/tests/util/misc.py
+++ b/tests/util/misc.py
@@ -3,6 +3,7 @@
import contextlib
import dataclasses
import enum
+import functools
import gc
import math
import os
@@ -13,10 +14,10 @@
from textwrap import dedent
from time import thread_time
from types import TracebackType
-from typing import Any, Callable, Iterator, List, Optional, Type, Union
+from typing import Any, Callable, Collection, Iterator, List, Optional, Type, Union
import pytest
-from typing_extensions import final
+from typing_extensions import Protocol, final
from tests.core.data_layer.util import ChiaRoot
@@ -303,3 +304,31 @@ def closing_chia_root_popen(chia_root: ChiaRoot, args: List[str]) -> Iterator[su
process.wait(timeout=10)
except subprocess.TimeoutExpired:
process.kill()
+
+
+# https://github.com/pytest-dev/pytest/blob/7.3.1/src/_pytest/mark/__init__.py#L45
+Marks = Union[pytest.MarkDecorator, Collection[Union[pytest.MarkDecorator, pytest.Mark]]]
+
+
+class DataCase(Protocol):
+ marks: Marks
+
+ @property
+ def id(self) -> str:
+ ...
+
+
+def datacases(*cases: DataCase, _name: str = "case") -> pytest.MarkDecorator:
+ return pytest.mark.parametrize(
+ argnames=_name,
+ argvalues=[pytest.param(case, id=case.id, marks=case.marks) for case in cases],
+ )
+
+
+class DataCasesDecorator(Protocol):
+ def __call__(self, *cases: DataCase, _name: str = "case") -> pytest.MarkDecorator:
+ ...
+
+
+def named_datacases(name: str) -> DataCasesDecorator:
+ return functools.partial(datacases, _name=name)
diff --git a/tests/util/test_network.py b/tests/util/test_network.py
--- a/tests/util/test_network.py
+++ b/tests/util/test_network.py
@@ -7,36 +7,36 @@
import pytest
-from chia.util.network import IPAddress, get_host_addr
+from chia.util.network import IPAddress, resolve
class TestNetwork:
@pytest.mark.asyncio
- async def test_get_host_addr4(self):
+ async def test_resolve4(self):
# Run these tests forcing IPv4 resolution
prefer_ipv6 = False
- assert get_host_addr("127.0.0.1", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1")
- assert get_host_addr("10.11.12.13", prefer_ipv6=prefer_ipv6) == IPAddress.create("10.11.12.13")
- assert get_host_addr("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1")
- assert get_host_addr("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create("93.184.216.34")
+ assert await resolve("127.0.0.1", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1")
+ assert await resolve("10.11.12.13", prefer_ipv6=prefer_ipv6) == IPAddress.create("10.11.12.13")
+ assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1")
+ assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create("93.184.216.34")
@pytest.mark.asyncio
@pytest.mark.skipif(
condition=("GITHUB_ACTIONS" in os.environ) and (sys.platform in {"darwin", "win32"}),
reason="macOS and Windows runners in GitHub Actions do not seem to support IPv6",
)
- async def test_get_host_addr6(self):
+ async def test_resolve6(self):
# Run these tests forcing IPv6 resolution
prefer_ipv6 = True
- assert get_host_addr("::1", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
- assert get_host_addr("2000:1000::1234:abcd", prefer_ipv6=prefer_ipv6) == IPAddress.create(
+ assert await resolve("::1", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
+ assert await resolve("2000:1000::1234:abcd", prefer_ipv6=prefer_ipv6) == IPAddress.create(
"2000:1000::1234:abcd"
)
# ip6-localhost is not always available, and localhost is IPv4 only
# on some systems. Just test neither here.
- # assert get_host_addr("ip6-localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
- # assert get_host_addr("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
- assert get_host_addr("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create(
+ # assert await resolve("ip6-localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
+ # assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
+ assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create(
"2606:2800:220:1:248:1893:25c8:1946"
)
diff --git a/tests/util/test_network_protocol_test.py b/tests/util/test_network_protocol_test.py
--- a/tests/util/test_network_protocol_test.py
+++ b/tests/util/test_network_protocol_test.py
@@ -1,7 +1,9 @@
# flake8: noqa
from __future__ import annotations
-from typing import Any, List, Set
+import ast
+import inspect
+from typing import Any, Set, cast
from chia.protocols import (
farmer_protocol,
@@ -19,16 +21,16 @@
def types_in_module(mod: Any) -> Set[str]:
- ret: List[str] = []
- mod_name = mod.__name__
- for sym in dir(mod):
- obj = getattr(mod, sym)
- if hasattr(obj, "__module__") and obj.__module__ == mod_name:
- ret.append(sym)
-
- if hasattr(mod, "__all__"):
- ret += getattr(mod, "__all__")
- return set(ret)
+ parsed = ast.parse(inspect.getsource(mod))
+ types = set()
+ for line in parsed.body:
+ if isinstance(line, ast.Assign):
+ name = cast(ast.Name, line.targets[0])
+ if inspect.isclass(getattr(mod, name.id)):
+ types.add(name.id)
+ elif isinstance(line, ast.ClassDef):
+ types.add(line.name)
+ return types
def test_missing_messages_state_machine() -> None:
@@ -155,8 +157,6 @@ def test_missing_messages() -> None:
"PutFarmerPayload",
"PutFarmerRequest",
"PutFarmerResponse",
- "get_current_authentication_token",
- "validate_authentication_token",
}
timelord_msgs = {
diff --git a/tests/util/test_tests_misc.py b/tests/util/test_tests_misc.py
new file mode 100644
--- /dev/null
+++ b/tests/util/test_tests_misc.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+import pytest
+
+from tests.util.misc import Marks, datacases, named_datacases
+
+
+@dataclass
+class DataCase:
+ id: str
+ marks: Marks
+
+
+sample_cases = [
+ DataCase(id="id_a", marks=[pytest.mark.test_mark_a1, pytest.mark.test_mark_a2]),
+ DataCase(id="id_b", marks=[pytest.mark.test_mark_b1, pytest.mark.test_mark_b2]),
+]
+
+
+def sample_result(name: str) -> pytest.MarkDecorator:
+ return pytest.mark.parametrize(
+ argnames=name,
+ argvalues=[pytest.param(case, id=case.id, marks=case.marks) for case in sample_cases],
+ )
+
+
+def test_datacases() -> None:
+ result = datacases(*sample_cases)
+
+ assert result == sample_result(name="case")
+
+
+def test_named_datacases() -> None:
+ result = named_datacases("Sharrilanda")(*sample_cases)
+
+ assert result == sample_result(name="Sharrilanda")
diff --git a/tests/util/test_trusted_peer.py b/tests/util/test_trusted_peer.py
new file mode 100644
--- /dev/null
+++ b/tests/util/test_trusted_peer.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import Any, Dict
+
+import pytest
+
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.network import is_trusted_peer
+
+
+@pytest.mark.parametrize(
+ "host,node_id,trusted_peers,testing,result",
+ [
+ # IPv6 localhost testing
+ ("::1", bytes32(b"0" * 32), {}, False, True),
+ # IPv6 localhost testing with mismatched node_id (still True)
+ ("::1", bytes32(b"d" * 32), {bytes32(b"a" * 32).hex(): "0"}, False, True),
+ # IPv6 localhost testing with testing flag True
+ ("::1", bytes32(b"0" * 32), {}, True, False),
+ ("::1", bytes32(b"d" * 32), {bytes32(b"a" * 32).hex(): "0"}, True, False),
+ # IPv6 localhost long form
+ ("0:0:0:0:0:0:0:1", bytes32(b"0" * 32), {}, False, True),
+ ("0:0:0:0:0:0:0:1", bytes32(b"0" * 32), {}, True, False),
+ # IPv4 localhost testing
+ ("127.0.0.1", bytes32(b"0" * 32), {}, False, True),
+ ("localhost", bytes32(b"0" * 32), {}, False, True),
+ ("127.0.0.1", bytes32(b"0" * 32), {}, True, False),
+ ("localhost", bytes32(b"0" * 32), {}, True, False),
+        # localhost testing with testing True but with matching node_id
+ ("127.0.0.1", bytes32(b"0" * 32), {bytes32(b"0" * 32).hex(): "0"}, True, True),
+ # misc
+ ("2000:1000::1234:abcd", bytes32(b"0" * 32), {}, True, False),
+ ("10.11.12.13", bytes32(b"d" * 32), {bytes32(b"a" * 32).hex(): "0"}, False, False),
+ ("10.11.12.13", bytes32(b"d" * 32), {bytes32(b"d" * 32).hex(): "0"}, False, True),
+ ("10.11.12.13", bytes32(b"d" * 32), {}, False, False),
+ ],
+)
+def test_is_trusted_peer(
+ host: str, node_id: bytes32, trusted_peers: Dict[str, Any], testing: bool, result: bool
+) -> None:
+ assert is_trusted_peer(host=host, node_id=node_id, trusted_peers=trusted_peers, testing=testing) == result
diff --git a/tests/wallet/cat_wallet/test_cat_wallet.py b/tests/wallet/cat_wallet/test_cat_wallet.py
--- a/tests/wallet/cat_wallet/test_cat_wallet.py
+++ b/tests/wallet/cat_wallet/test_cat_wallet.py
@@ -7,17 +7,29 @@
from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from chia.rpc.wallet_rpc_api import WalletRpcApi
+from chia.rpc.wallet_rpc_client import WalletRpcClient
+from chia.simulator.full_node_simulator import FullNodeSimulator
+from chia.simulator.setup_nodes import SimulatorsAndWalletsServices
from chia.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
-from chia.simulator.time_out_assert import time_out_assert
-from chia.types.blockchain_format.coin import Coin
+from chia.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
+from chia.types.blockchain_format.coin import Coin, coin_as_list
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
from chia.types.peer_info import PeerInfo
+from chia.util.bech32m import encode_puzzle_hash
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_info import LegacyCATInfo
from chia.wallet.cat_wallet.cat_utils import construct_cat_puzzle
from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.derivation_record import DerivationRecord
+from chia.wallet.derive_keys import _derive_path_unhardened, master_sk_to_wallet_sk_unhardened_intermediate
+from chia.wallet.lineage_proof import LineageProof
from chia.wallet.puzzles.cat_loader import CAT_MOD
+from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import puzzle_hash_for_pk
from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet_info import WalletInfo
@@ -204,7 +216,7 @@ async def test_cat_spend(self, self_hostname, two_wallet_nodes, trusted):
await time_out_assert(20, cat_wallet.get_pending_change_balance, 40)
memos = await api_0.get_transaction_memo(dict(transaction_id=tx_id))
- assert len(memos[tx_id]) == 1
+ assert len(memos[tx_id]) == 2 # One for tx, one for change
assert list(memos[tx_id].values())[0][0] == cat_2_hash.hex()
for i in range(1, num_blocks):
@@ -222,7 +234,7 @@ async def test_cat_spend(self, self_hostname, two_wallet_nodes, trusted):
coin = coins.pop()
tx_id = coin.name().hex()
memos = await api_1.get_transaction_memo(dict(transaction_id=tx_id))
- assert len(memos[tx_id]) == 1
+ assert len(memos[tx_id]) == 2
assert list(memos[tx_id].values())[0][0] == cat_2_hash.hex()
cat_hash = await cat_wallet.get_new_inner_hash()
tx_records = await cat_wallet_2.generate_signed_transaction([uint64(15)], [cat_hash])
@@ -605,7 +617,7 @@ async def test_cat_spend_multiple(self, self_hostname, three_wallet_nodes, trust
for tx in txs:
if tx.amount == 30:
memos = tx.get_memos()
- assert len(memos) == 1
+ assert len(memos) == 2 # One for tx, one for change
assert b"Markus Walburg" in [v for v_list in memos.values() for v in v_list]
assert list(memos.keys())[0] in [a.name() for a in tx.spend_bundle.additions()]
@@ -827,3 +839,164 @@ async def check_wallets(node):
await time_out_assert(20, cat_wallet.get_confirmed_balance, 35)
await time_out_assert(20, cat_wallet.get_unconfirmed_balance, 35)
+
+ @pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+ )
+ @pytest.mark.asyncio
+ async def test_cat_change_detection(
+ self, self_hostname: str, one_wallet_and_one_simulator_services: SimulatorsAndWalletsServices, trusted: bool
+ ) -> None:
+ num_blocks = 1
+ full_nodes, wallets, bt = one_wallet_and_one_simulator_services
+ full_node_api: FullNodeSimulator = full_nodes[0]._api
+ full_node_server = full_node_api.full_node.server
+ wallet_service_0 = wallets[0]
+ wallet_node_0 = wallet_service_0._node
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+
+ assert wallet_service_0.rpc_server is not None
+
+ client_0 = await WalletRpcClient.create(
+ bt.config["self_hostname"],
+ wallet_service_0.rpc_server.listen_port,
+ wallet_service_0.root_path,
+ wallet_service_0.config,
+ )
+ wallet_node_0.config["automatically_add_unknown_cats"] = True
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+
+ await wallet_node_0.server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_0)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=20)
+
+ # Mint CAT to ourselves, immediately spend it to an unhinted puzzle hash that we have manually added to the DB
+ # We should pick up this coin as balance even though it is unhinted because it is "change"
+ intermediate_sk_un = master_sk_to_wallet_sk_unhardened_intermediate(
+ wallet_node_0.wallet_state_manager.private_key
+ )
+ pubkey_unhardened = _derive_path_unhardened(intermediate_sk_un, [100000000]).get_g1()
+ inner_puzhash = puzzle_hash_for_pk(pubkey_unhardened)
+ puzzlehash_unhardened = construct_cat_puzzle(
+ CAT_MOD,
+ Program.to(None).get_tree_hash(),
+ inner_puzhash, # type: ignore[arg-type]
+ ).get_tree_hash_precalc(inner_puzhash)
+ change_derivation = DerivationRecord(
+ uint32(0),
+ puzzlehash_unhardened,
+ pubkey_unhardened,
+ WalletType.CAT,
+ uint32(2),
+ False,
+ )
+ # Insert the derivation record before the wallet exists so that it is not subscribed to
+ await wallet_node_0.wallet_state_manager.puzzle_store.add_derivation_paths([change_derivation])
+ our_puzzle: Program = await wallet_0.get_new_puzzle()
+ cat_puzzle: Program = construct_cat_puzzle(
+ CAT_MOD,
+ Program.to(None).get_tree_hash(),
+ Program.to(1),
+ )
+ addr = encode_puzzle_hash(cat_puzzle.get_tree_hash(), "txch")
+ cat_amount_0 = uint64(100)
+ cat_amount_1 = uint64(5)
+
+ tx = await client_0.send_transaction(1, cat_amount_0, addr)
+ spend_bundle = tx.spend_bundle
+ assert spend_bundle is not None
+
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_0)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=20)
+
+ # Do the eve spend back to our wallet and add the CR layer
+ cat_coin = next(c for c in spend_bundle.additions() if c.amount == cat_amount_0)
+ next_coin = Coin(
+ cat_coin.name(),
+ construct_cat_puzzle(
+ CAT_MOD,
+ Program.to(None).get_tree_hash(),
+ our_puzzle,
+ ).get_tree_hash(),
+ cat_amount_0,
+ )
+ eve_spend = await wallet_node_0.wallet_state_manager.main_wallet.sign_transaction(
+ [
+ CoinSpend(
+ cat_coin,
+ cat_puzzle,
+ Program.to(
+ [
+ Program.to(
+ [
+ [
+ 51,
+ our_puzzle.get_tree_hash(),
+ cat_amount_0,
+ [our_puzzle.get_tree_hash()],
+ ],
+ [51, None, -113, None, None],
+ ]
+ ),
+ None,
+ cat_coin.name(),
+ coin_as_list(cat_coin),
+ [cat_coin.parent_coin_info, Program.to(1).get_tree_hash(), cat_coin.amount],
+ 0,
+ 0,
+ ]
+ ),
+ ),
+ CoinSpend(
+ next_coin,
+ construct_cat_puzzle(
+ CAT_MOD,
+ Program.to(None).get_tree_hash(),
+ our_puzzle,
+ ),
+ Program.to(
+ [
+ [
+ None,
+ (
+ 1,
+ [
+ [51, inner_puzhash, cat_amount_1],
+ [51, bytes32([0] * 32), cat_amount_0 - cat_amount_1],
+ ],
+ ),
+ None,
+ ],
+ LineageProof(
+ cat_coin.parent_coin_info, Program.to(1).get_tree_hash(), cat_amount_0
+ ).to_program(),
+ next_coin.name(),
+ coin_as_list(next_coin),
+ [next_coin.parent_coin_info, our_puzzle.get_tree_hash(), next_coin.amount],
+ 0,
+ 0,
+ ]
+ ),
+ ),
+ ],
+ )
+ await client_0.push_tx(eve_spend)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, eve_spend.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_0)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=20)
+
+ async def check_wallets(node):
+ return len(node.wallet_state_manager.wallets.keys())
+
+ await time_out_assert(20, check_wallets, 2, wallet_node_0)
+ cat_wallet = wallet_node_0.wallet_state_manager.wallets[uint32(2)]
+ await time_out_assert(20, cat_wallet.get_confirmed_balance, cat_amount_1)
+ assert not full_node_api.full_node.subscriptions.has_ph_subscription(puzzlehash_unhardened)
diff --git a/tests/wallet/cat_wallet/test_offer_lifecycle.py b/tests/wallet/cat_wallet/test_offer_lifecycle.py
--- a/tests/wallet/cat_wallet/test_offer_lifecycle.py
+++ b/tests/wallet/cat_wallet/test_offer_lifecycle.py
@@ -60,7 +60,7 @@ async def generate_coins(
if tail_str:
tail: Program = str_to_tail(tail_str) # Making a fake but unique TAIL
cat_puzzle: Program = construct_cat_puzzle(CAT_MOD, tail.get_tree_hash(), acs)
- payments.append(Payment(cat_puzzle.get_tree_hash(), amount, []))
+ payments.append(Payment(cat_puzzle.get_tree_hash(), amount))
cat_bundles.append(
unsigned_spend_bundle_for_spendable_cats(
CAT_MOD,
@@ -75,7 +75,7 @@ async def generate_coins(
)
)
else:
- payments.append(Payment(acs_ph, amount, []))
+ payments.append(Payment(acs_ph, amount))
# This bundle creates all of the initial coins
parent_bundle = SpendBundle(
diff --git a/tests/wallet/cat_wallet/test_trades.py b/tests/wallet/cat_wallet/test_trades.py
--- a/tests/wallet/cat_wallet/test_trades.py
+++ b/tests/wallet/cat_wallet/test_trades.py
@@ -35,12 +35,12 @@
class TestCATTrades:
@pytest.mark.asyncio
@pytest.mark.parametrize(
- "forwards_compat",
- [True, False],
- )
- @pytest.mark.parametrize(
- "reuse_puzhash",
- [True, False],
+ "forwards_compat,reuse_puzhash",
+ [
+ (True, False),
+ (False, True),
+ (False, False),
+ ],
)
async def test_cat_trades(
self, wallets_prefarm, forwards_compat: bool, reuse_puzhash: bool, softfork_height: uint32
@@ -161,12 +161,11 @@ async def test_cat_trades(
assert trade_make is not None
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer),
peer,
fee=uint64(1),
- reuse_puzhash=reuse_puzhash and not forwards_compat,
+ reuse_puzhash=reuse_puzhash,
)
assert trade_take is not None
assert tx_records is not None
@@ -333,7 +332,7 @@ async def assert_trade_tx_number(wallet_node, trade_id, number):
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer),
peer,
- reuse_puzhash=reuse_puzhash and not forwards_compat,
+ reuse_puzhash=reuse_puzhash,
)
await time_out_assert(15, full_node.txs_in_mempool, True, tx_records)
assert trade_take is not None
@@ -675,7 +674,6 @@ async def get_trade_and_status(trade_manager, trade) -> TradeStatus:
await time_out_assert(15, wallet_taker.get_confirmed_balance, taker_funds)
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
with pytest.raises(ValueError, match="This offer is no longer valid"):
await trade_manager_taker.respond_to_offer(Offer.from_bytes(trade_make.offer), peer)
diff --git a/tests/wallet/db_wallet/test_db_graftroot.py b/tests/wallet/db_wallet/test_db_graftroot.py
--- a/tests/wallet/db_wallet/test_db_graftroot.py
+++ b/tests/wallet/db_wallet/test_db_graftroot.py
@@ -16,7 +16,7 @@
from chia.wallet.puzzles.load_clvm import load_clvm
from chia.wallet.util.merkle_utils import build_merkle_tree, build_merkle_tree_from_binary_tree, simplify_merkle_proof
-GRAFTROOT_MOD = load_clvm("graftroot_dl_offers.clvm")
+GRAFTROOT_MOD = load_clvm("graftroot_dl_offers.clsp")
# Always returns the last value
# (mod solution
diff --git a/tests/wallet/db_wallet/test_dl_offers.py b/tests/wallet/db_wallet/test_dl_offers.py
--- a/tests/wallet/db_wallet/test_dl_offers.py
+++ b/tests/wallet/db_wallet/test_dl_offers.py
@@ -57,13 +57,10 @@ async def test_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_compat: b
wsm_maker = wallet_node_maker.wallet_state_manager
wsm_taker = wallet_node_taker.wallet_state_manager
- wallet_maker = wsm_maker.main_wallet
- wallet_taker = wsm_taker.main_wallet
-
async with wsm_maker.lock:
- dl_wallet_maker = await DataLayerWallet.create_new_dl_wallet(wsm_maker, wallet_maker)
+ dl_wallet_maker = await DataLayerWallet.create_new_dl_wallet(wsm_maker)
async with wsm_taker.lock:
- dl_wallet_taker = await DataLayerWallet.create_new_dl_wallet(wsm_taker, wallet_taker)
+ dl_wallet_taker = await DataLayerWallet.create_new_dl_wallet(wsm_taker)
MAKER_ROWS = [bytes32([i] * 32) for i in range(0, 10)]
TAKER_ROWS = [bytes32([i] * 32) for i in range(0, 10)]
@@ -91,7 +88,6 @@ async def test_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_compat: b
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_taker, launcher_id_taker, taker_root)
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
await dl_wallet_maker.track_new_launcher_id(launcher_id_taker, peer)
await dl_wallet_taker.track_new_launcher_id(launcher_id_maker, peer)
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_maker, launcher_id_taker, taker_root)
@@ -221,6 +217,9 @@ async def test_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_compat: b
]
}
+ wallet_maker = wsm_maker.main_wallet
+ wallet_taker = wsm_taker.main_wallet
+
await time_out_assert(15, wallet_maker.get_unconfirmed_balance, maker_funds)
await time_out_assert(15, wallet_taker.get_unconfirmed_balance, taker_funds - fee)
@@ -264,10 +263,8 @@ async def test_dl_offer_cancellation(wallets_prefarm: Any, trusted: bool) -> Non
assert wallet_node.wallet_state_manager is not None
wsm = wallet_node.wallet_state_manager
- wallet = wsm.main_wallet
-
async with wsm.lock:
- dl_wallet = await DataLayerWallet.create_new_dl_wallet(wsm, wallet)
+ dl_wallet = await DataLayerWallet.create_new_dl_wallet(wsm)
ROWS = [bytes32([i] * 32) for i in range(0, 10)]
root, _ = build_merkle_tree(ROWS)
@@ -337,13 +334,10 @@ async def test_multiple_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_
wsm_maker = wallet_node_maker.wallet_state_manager
wsm_taker = wallet_node_taker.wallet_state_manager
- wallet_maker = wsm_maker.main_wallet
- wallet_taker = wsm_taker.main_wallet
-
async with wsm_maker.lock:
- dl_wallet_maker = await DataLayerWallet.create_new_dl_wallet(wsm_maker, wallet_maker)
+ dl_wallet_maker = await DataLayerWallet.create_new_dl_wallet(wsm_maker)
async with wsm_taker.lock:
- dl_wallet_taker = await DataLayerWallet.create_new_dl_wallet(wsm_taker, wallet_taker)
+ dl_wallet_taker = await DataLayerWallet.create_new_dl_wallet(wsm_taker)
MAKER_ROWS = [bytes32([i] * 32) for i in range(0, 10)]
TAKER_ROWS = [bytes32([i] * 32) for i in range(10, 20)]
@@ -387,7 +381,6 @@ async def test_multiple_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_taker, launcher_id_taker_2, taker_root)
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
await dl_wallet_maker.track_new_launcher_id(launcher_id_taker_1, peer)
await dl_wallet_maker.track_new_launcher_id(launcher_id_taker_2, peer)
await dl_wallet_taker.track_new_launcher_id(launcher_id_maker_1, peer)
@@ -505,6 +498,9 @@ async def test_multiple_dl_offers(wallets_prefarm: Any, trusted: bool, forwards_
assert offer_taker is not None
assert tx_records is not None
+ wallet_maker = wsm_maker.main_wallet
+ wallet_taker = wsm_taker.main_wallet
+
await time_out_assert(15, wallet_maker.get_unconfirmed_balance, maker_funds)
await time_out_assert(15, wallet_taker.get_unconfirmed_balance, taker_funds - fee)
diff --git a/tests/wallet/db_wallet/test_dl_wallet.py b/tests/wallet/db_wallet/test_dl_wallet.py
--- a/tests/wallet/db_wallet/test_dl_wallet.py
+++ b/tests/wallet/db_wallet/test_dl_wallet.py
@@ -59,7 +59,7 @@ async def test_initial_creation(
await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
async with wallet_node_0.wallet_state_manager.lock:
- dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager, wallet_0)
+ dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager)
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
@@ -109,7 +109,7 @@ async def test_get_owned_singletons(
await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
async with wallet_node_0.wallet_state_manager.lock:
- dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager, wallet_0)
+ dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager)
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
@@ -150,7 +150,6 @@ async def test_tracking_non_owned(
wallet_node_0, server_0 = wallets[0]
wallet_node_1, server_1 = wallets[1]
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
- wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
if trusted:
wallet_node_0.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
@@ -168,10 +167,10 @@ async def test_tracking_non_owned(
await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
async with wallet_node_0.wallet_state_manager.lock:
- dl_wallet_0 = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager, wallet_0)
+ dl_wallet_0 = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager)
async with wallet_node_1.wallet_state_manager.lock:
- dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wallet_node_1.wallet_state_manager, wallet_1)
+ dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wallet_node_1.wallet_state_manager)
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
@@ -189,7 +188,6 @@ async def test_tracking_non_owned(
await asyncio.sleep(0.5)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
await dl_wallet_1.track_new_launcher_id(launcher_id, peer)
await time_out_assert(15, is_singleton_confirmed, True, dl_wallet_1, launcher_id)
await asyncio.sleep(0.5)
@@ -245,7 +243,7 @@ async def test_lifecycle(
await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
async with wallet_node_0.wallet_state_manager.lock:
- dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager, wallet_0)
+ dl_wallet = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager)
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
@@ -348,10 +346,10 @@ async def test_rebase(self, self_hostname: str, two_wallet_nodes: SimulatorsAndW
await time_out_assert(10, wallet_1.get_confirmed_balance, funds)
async with wallet_node_0.wallet_state_manager.lock:
- dl_wallet_0 = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager, wallet_0)
+ dl_wallet_0 = await DataLayerWallet.create_new_dl_wallet(wallet_node_0.wallet_state_manager)
async with wallet_node_1.wallet_state_manager.lock:
- dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wallet_node_1.wallet_state_manager, wallet_1)
+ dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wallet_node_1.wallet_state_manager)
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
@@ -379,7 +377,6 @@ async def is_singleton_confirmed(wallet: DataLayerWallet, lid: bytes32) -> bool:
await asyncio.sleep(0.5)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
await dl_wallet_1.track_new_launcher_id(launcher_id, peer)
await time_out_assert(15, is_singleton_confirmed, True, dl_wallet_1, launcher_id)
current_record = await dl_wallet_1.get_latest_singleton(launcher_id)
@@ -528,13 +525,10 @@ async def test_mirrors(wallets_prefarm: Any, trusted: bool) -> None:
wsm_1 = wallet_node_1.wallet_state_manager
wsm_2 = wallet_node_2.wallet_state_manager
- wallet_1 = wsm_1.main_wallet
- wallet_2 = wsm_2.main_wallet
-
async with wsm_1.lock:
- dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wsm_1, wallet_1)
+ dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wsm_1)
async with wsm_2.lock:
- dl_wallet_2 = await DataLayerWallet.create_new_dl_wallet(wsm_2, wallet_2)
+ dl_wallet_2 = await DataLayerWallet.create_new_dl_wallet(wsm_2)
dl_record, std_record, launcher_id_1 = await dl_wallet_1.generate_new_reporter(bytes32([0] * 32))
assert await dl_wallet_1.get_latest_singleton(launcher_id_1) is not None
@@ -551,10 +545,8 @@ async def test_mirrors(wallets_prefarm: Any, trusted: bool) -> None:
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_2, bytes32([0] * 32))
peer_1 = wallet_node_1.get_full_node_peer()
- assert peer_1 is not None
await dl_wallet_1.track_new_launcher_id(launcher_id_2, peer_1)
peer_2 = wallet_node_2.get_full_node_peer()
- assert peer_2 is not None
await dl_wallet_2.track_new_launcher_id(launcher_id_1, peer_2)
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_1, launcher_id_2, bytes32([0] * 32))
await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_1, bytes32([0] * 32))
diff --git a/tests/wallet/did_wallet/test_did.py b/tests/wallet/did_wallet/test_did.py
--- a/tests/wallet/did_wallet/test_did.py
+++ b/tests/wallet/did_wallet/test_did.py
@@ -7,10 +7,8 @@
import pytest
from blspy import AugSchemeMPL, G1Element, G2Element
-from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from chia.rpc.wallet_rpc_api import WalletRpcApi
from chia.simulator.setup_nodes import SimulatorsAndWallets
-from chia.simulator.simulator_protocol import FarmNewBlockProtocol
from chia.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
@@ -19,9 +17,9 @@
from chia.types.spend_bundle import SpendBundle
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
from chia.util.condition_tools import conditions_dict_for_solution
-from chia.util.ints import uint16, uint32, uint64
+from chia.util.ints import uint16, uint64
from chia.wallet.did_wallet.did_wallet import DIDWallet
-from chia.wallet.singleton import create_fullpuz
+from chia.wallet.singleton import create_singleton_puzzle
from chia.wallet.util.address_type import AddressType
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX
@@ -48,7 +46,6 @@ async def test_creation_from_coin_spend(
Verify that DIDWallet.create_new_did_wallet_from_coin_spend() is called after Singleton creation on
the blockchain, and that the wallet is created in the second wallet node.
"""
- num_blocks = 5
full_nodes, wallets, _ = two_nodes_two_wallets_with_same_keys
full_node_api = full_nodes[0]
full_node_server = full_node_api.server
@@ -84,20 +81,8 @@ async def test_creation_from_coin_spend(
await server_0.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
await server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(10, wallet_0.get_unconfirmed_balance, funds)
- await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
+ await full_node_api.farm_blocks_to_wallet(1, wallet_1)
# Wallet1 sets up DIDWallet1 without any backup set
async with wallet_node_0.wallet_state_manager.lock:
@@ -113,15 +98,13 @@ async def test_creation_from_coin_spend(
assert spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
await time_out_assert(15, did_wallet_0.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_0.get_unconfirmed_balance, 101)
await time_out_assert(15, did_wallet_0.get_pending_change_balance, 0)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
#######################
all_node_0_wallets = await wallet_node_0.wallet_state_manager.user_store.get_all_wallet_info_entries()
@@ -139,7 +122,6 @@ async def test_creation_from_coin_spend(
)
@pytest.mark.asyncio
async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = three_wallet_nodes
full_node_api = full_nodes[0]
full_node_server = full_node_api.server
@@ -150,9 +132,6 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
- ph = await wallet_0.get_new_puzzlehash()
- ph1 = await wallet_1.get_new_puzzlehash()
- ph2 = await wallet_2.get_new_puzzlehash()
if trusted:
wallet_node_0.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
@@ -171,22 +150,9 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
await server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(10, wallet_0.get_unconfirmed_balance, funds)
- await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
+ await full_node_api.farm_blocks_to_wallet(1, wallet_1)
+ await full_node_api.farm_blocks_to_wallet(1, wallet_2)
# Wallet1 sets up DIDWallet1 without any backup set
async with wallet_node_0.wallet_state_manager.lock:
@@ -201,8 +167,7 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
await time_out_assert(15, did_wallet_0.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_0.get_unconfirmed_balance, 101)
@@ -222,8 +187,7 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 201)
await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 201)
@@ -253,8 +217,7 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
(
test_info_list,
@@ -272,8 +235,7 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
await time_out_assert(45, did_wallet_2.get_confirmed_balance, 201)
await time_out_assert(45, did_wallet_2.get_unconfirmed_balance, 201)
@@ -288,8 +250,7 @@ async def test_creation_from_backup_file(self, self_hostname, three_wallet_nodes
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
async def get_coins_with_ph():
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, some_ph)
@@ -307,7 +268,6 @@ async def get_coins_with_ph():
)
@pytest.mark.asyncio
async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.server
@@ -316,7 +276,6 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
wallet = wallet_node.wallet_state_manager.main_wallet
wallet2 = wallet_node_2.wallet_state_manager.main_wallet
- ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
@@ -330,17 +289,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -352,9 +301,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -373,8 +320,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet_2.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_2.get_unconfirmed_balance, 101)
@@ -395,9 +341,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
assert did_wallet_3.did_info.backup_ids == recovery_list
await time_out_assert(15, did_wallet_3.get_confirmed_balance, 201)
@@ -439,8 +383,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
) = await did_wallet_4.load_attest_files_for_recovery_spend([attest1, attest2])
assert message_spend_bundle == test_message_spend_bundle
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_4.get_confirmed_balance, 0)
await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 0)
await did_wallet_4.recovery_spend(coin, new_ph, test_info_list, pubkey, message_spend_bundle)
@@ -451,8 +394,7 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_4.get_confirmed_balance, 201)
await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 201)
@@ -465,7 +407,6 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_w
)
@pytest.mark.asyncio
async def test_did_recovery_with_empty_set(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.server
@@ -487,17 +428,7 @@ async def test_did_recovery_with_empty_set(self, self_hostname, two_wallet_nodes
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -509,8 +440,7 @@ async def test_did_recovery_with_empty_set(self, self_hostname, two_wallet_nodes
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -534,7 +464,6 @@ async def test_did_recovery_with_empty_set(self, self_hostname, two_wallet_nodes
)
@pytest.mark.asyncio
async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.server
@@ -543,7 +472,6 @@ async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted)
wallet = wallet_node.wallet_state_manager.main_wallet
wallet2 = wallet_node_2.wallet_state_manager.main_wallet
api_0 = WalletRpcApi(wallet_node)
- ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
@@ -556,17 +484,7 @@ async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted)
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -576,9 +494,7 @@ async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(15, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -610,8 +526,7 @@ async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted)
spend_bundle_list = await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -634,7 +549,6 @@ async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted)
)
@pytest.mark.asyncio
async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.server
@@ -642,7 +556,6 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
wallet2 = wallet_node_2.wallet_state_manager.main_wallet
- ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
@@ -655,17 +568,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -675,9 +578,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(15, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -693,9 +594,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph = await wallet.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(25, did_wallet_2.get_confirmed_balance, 101)
await time_out_assert(25, did_wallet_2.get_unconfirmed_balance, 101)
assert did_wallet_2.did_info.backup_ids == recovery_list
@@ -711,8 +610,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101)
@@ -738,8 +636,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
(
info,
@@ -753,8 +650,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_3.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_3.get_unconfirmed_balance, 101)
@@ -781,8 +677,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, wallet.get_pending_change_balance, 0)
(
test_info_list,
@@ -797,8 +692,7 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(15, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_4.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 101)
@@ -815,7 +709,6 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes,
)
@pytest.mark.asyncio
async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery, trusted):
- num_blocks = 5
fee = uint64(1000)
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
@@ -839,17 +732,7 @@ async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -867,13 +750,9 @@ async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery
)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101)
- await time_out_assert(15, wallet.get_confirmed_balance, 7999999998899)
- await time_out_assert(15, wallet.get_unconfirmed_balance, 7999999998899)
# Transfer DID
new_puzhash = await wallet2.get_new_puzzlehash()
await did_wallet_1.transfer_did(new_puzhash, fee, with_recovery)
@@ -882,11 +761,7 @@ async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery
)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
- await time_out_assert(15, wallet.get_confirmed_balance, 7999999997899)
- await time_out_assert(15, wallet.get_unconfirmed_balance, 7999999997899)
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
# Check if the DID wallet is created in the wallet2
await time_out_assert(30, get_wallet_num, 2, wallet_node_2.wallet_state_manager)
@@ -914,7 +789,6 @@ async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery
)
@pytest.mark.asyncio
async def test_update_recovery_list(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.server
@@ -936,17 +810,7 @@ async def test_update_recovery_list(self, self_hostname, two_wallet_nodes, trust
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -957,16 +821,12 @@ async def test_update_recovery_list(self, self_hostname, two_wallet_nodes, trust
)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101)
await did_wallet_1.update_recovery_list([bytes(ph)], 1)
await did_wallet_1.create_update_spend()
- ph2 = await wallet.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101)
assert did_wallet_1.did_info.backup_ids[0] == bytes(ph)
@@ -1017,9 +877,9 @@ async def test_get_info(self, self_hostname, two_wallet_nodes, trusted):
assert await did_wallet_1.get_confirmed_balance() == did_amount
assert await did_wallet_1.get_unconfirmed_balance() == did_amount
response = await api_0.did_get_info({"coin_id": did_wallet_1.did_info.origin_coin.name().hex()})
-
+ assert response["did_id"] == encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value)
assert response["launcher_id"] == did_wallet_1.did_info.origin_coin.name().hex()
- assert response["full_puzzle"] == create_fullpuz(
+ assert response["full_puzzle"] == create_singleton_puzzle(
did_wallet_1.did_info.current_inner, did_wallet_1.did_info.origin_coin.name()
)
assert response["metadata"]["twitter"] == "twitter"
@@ -1062,7 +922,6 @@ async def test_get_info(self, self_hostname, two_wallet_nodes, trusted):
)
@pytest.mark.asyncio
async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 3
fee = uint64(1000)
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
@@ -1071,8 +930,6 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted):
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
wallet1 = wallet_node_2.wallet_state_manager.main_wallet
- ph = await wallet.get_new_puzzlehash()
- ph1 = await wallet1.get_new_puzzlehash()
api_0 = WalletRpcApi(wallet_node)
if trusted:
wallet_node.config["trusted_peers"] = {
@@ -1087,17 +944,8 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted):
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -1108,8 +956,7 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted):
)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
+ await full_node_api.farm_blocks_to_wallet(1, wallet1)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101)
response = await api_0.did_message_spend(
@@ -1117,7 +964,7 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted):
)
assert "spend_bundle" in response
spend = response["spend_bundle"].coin_spends[0]
- error, conditions, cost = conditions_dict_for_solution(
+ conditions = conditions_dict_for_solution(
spend.puzzle_reveal.to_program(),
spend.solution.to_program(),
wallet.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
@@ -1204,7 +1051,6 @@ async def test_update_metadata(self, self_hostname, two_wallet_nodes, trusted):
)
@pytest.mark.asyncio
async def test_did_sign_message(self, self_hostname, two_wallet_nodes, trusted):
- num_blocks = 5
fee = uint64(1000)
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
@@ -1229,17 +1075,7 @@ async def test_did_sign_message(self, self_hostname, two_wallet_nodes, trusted):
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(15, wallet.get_confirmed_balance, funds)
+ await full_node_api.farm_blocks_to_wallet(1, wallet)
async with wallet_node.wallet_state_manager.lock:
did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -1257,9 +1093,7 @@ async def test_did_sign_message(self, self_hostname, two_wallet_nodes, trusted):
)
spend_bundle = spend_bundle_list[0].spend_bundle
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
- ph2 = await wallet2.get_new_puzzlehash()
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph2))
+ await full_node_api.farm_blocks_to_wallet(1, wallet2)
await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101)
# Test general string
message = "Hello World"
@@ -1306,7 +1140,6 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_
to the wallet_node that the DID Wallet is being created in (client1).
"""
- num_blocks = 5
full_nodes, wallets, _ = two_nodes_two_wallets_with_same_keys
full_node_api = full_nodes[0]
full_node_server = full_node_api.server
@@ -1342,20 +1175,8 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_
await server_0.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
await server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
-
- funds = sum(
- [
- calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
- for i in range(1, num_blocks - 1)
- ]
- )
-
- await time_out_assert(10, wallet_0.get_unconfirmed_balance, funds)
- await time_out_assert(10, wallet_0.get_confirmed_balance, funds)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
+ await full_node_api.farm_blocks_to_wallet(1, wallet_1)
# Node 0 sets up a DID Wallet with a backup set, but num_of_backup_ids_needed=0
# (a malformed solution, but legal for the clvm puzzle)
@@ -1378,15 +1199,13 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_
await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
# Node 1 creates the DID Wallet with create_new_did_wallet_from_coin_spend
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
await time_out_assert(15, did_wallet_0.get_confirmed_balance, 101)
await time_out_assert(15, did_wallet_0.get_unconfirmed_balance, 101)
await time_out_assert(15, did_wallet_0.get_pending_change_balance, 0)
- for i in range(1, num_blocks):
- await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
+ await full_node_api.farm_blocks_to_wallet(1, wallet_0)
#######################
all_node_0_wallets = await wallet_node_0.wallet_state_manager.user_store.get_all_wallet_info_entries()
diff --git a/tests/wallet/nft_wallet/test_nft_1_offers.py b/tests/wallet/nft_wallet/test_nft_1_offers.py
--- a/tests/wallet/nft_wallet/test_nft_1_offers.py
+++ b/tests/wallet/nft_wallet/test_nft_1_offers.py
@@ -196,7 +196,6 @@ async def test_nft_offer_sell_nft(
taker_fee = 1
assert not mempool_not_empty(full_node_api)
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
@@ -373,7 +372,6 @@ async def test_nft_offer_request_nft(
taker_fee = 1
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
)
@@ -561,7 +559,6 @@ async def test_nft_offer_sell_did_to_did(
taker_fee = 1
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
)
@@ -774,7 +771,6 @@ async def test_nft_offer_sell_nft_for_cat(
taker_fee = 1
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
)
@@ -994,7 +990,6 @@ async def test_nft_offer_request_nft_for_cat(
taker_fee = 1
peer = wallet_node_taker.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
)
diff --git a/tests/wallet/nft_wallet/test_nft_offers.py b/tests/wallet/nft_wallet/test_nft_offers.py
--- a/tests/wallet/nft_wallet/test_nft_offers.py
+++ b/tests/wallet/nft_wallet/test_nft_offers.py
@@ -156,7 +156,6 @@ async def test_nft_offer_with_fee(
taker_fee = uint64(1)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
old_maker_offer if forwards_compat else Offer.from_bytes(trade_make.offer),
peer,
@@ -497,7 +496,6 @@ async def test_nft_offer_with_metadata_update(self_hostname: str, two_wallet_nod
taker_fee = uint64(1)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
)
@@ -663,7 +661,6 @@ async def test_nft_offer_nft_for_cat(
taker_fee = uint64(1)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
Offer.from_bytes(trade_make.offer),
peer,
@@ -887,7 +884,6 @@ async def test_nft_offer_nft_for_nft(self_hostname: str, two_wallet_nodes: Any,
taker_fee = uint64(1)
peer = wallet_node_1.get_full_node_peer()
- assert peer is not None
trade_take, tx_records = await trade_manager_taker.respond_to_offer(
Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
)
diff --git a/tests/wallet/nft_wallet/test_nft_puzzles.py b/tests/wallet/nft_wallet/test_nft_puzzles.py
--- a/tests/wallet/nft_wallet/test_nft_puzzles.py
+++ b/tests/wallet/nft_wallet/test_nft_puzzles.py
@@ -20,19 +20,19 @@
from chia.wallet.uncurried_puzzle import uncurry_puzzle
from tests.core.make_block_generator import int_to_public_key
-SINGLETON_MOD = load_clvm("singleton_top_layer_v1_1.clvm")
-LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
-DID_MOD = load_clvm("did_innerpuz.clvm")
-NFT_STATE_LAYER_MOD = load_clvm("nft_state_layer.clvm")
-NFT_OWNERSHIP_LAYER = load_clvm("nft_ownership_layer.clvm")
-NFT_TRANSFER_PROGRAM_DEFAULT = load_clvm("nft_ownership_transfer_program_one_way_claim_with_royalties.clvm")
+SINGLETON_MOD = load_clvm("singleton_top_layer_v1_1.clsp")
+LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clsp")
+DID_MOD = load_clvm("did_innerpuz.clsp")
+NFT_STATE_LAYER_MOD = load_clvm("nft_state_layer.clsp")
+NFT_OWNERSHIP_LAYER = load_clvm("nft_ownership_layer.clsp")
+NFT_TRANSFER_PROGRAM_DEFAULT = load_clvm("nft_ownership_transfer_program_one_way_claim_with_royalties.clsp")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
NFT_STATE_LAYER_MOD_HASH = NFT_STATE_LAYER_MOD.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
-OFFER_MOD = load_clvm("settlement_payments.clvm")
+OFFER_MOD = load_clvm("settlement_payments.clsp")
LAUNCHER_ID = Program.to(b"launcher-id").get_tree_hash()
-NFT_METADATA_UPDATER_DEFAULT = load_clvm("nft_metadata_updater_default.clvm")
+NFT_METADATA_UPDATER_DEFAULT = load_clvm("nft_metadata_updater_default.clsp")
def test_nft_transfer_puzzle_hashes():
diff --git a/tests/wallet/rpc/test_wallet_rpc.py b/tests/wallet/rpc/test_wallet_rpc.py
--- a/tests/wallet/rpc/test_wallet_rpc.py
+++ b/tests/wallet/rpc/test_wallet_rpc.py
@@ -49,6 +49,8 @@
from chia.wallet.uncurried_puzzle import uncurry_puzzle
from chia.wallet.util.address_type import AddressType
from chia.wallet.util.compute_memos import compute_memos
+from chia.wallet.util.query_filter import TransactionTypeFilter
+from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.util.wallet_types import WalletType
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_node import WalletNode
@@ -201,7 +203,7 @@ def assert_tx_amounts(
) -> None:
assert tx.fee_amount == amount_fee
assert tx.amount == sum(output["amount"] for output in outputs)
- expected_additions = len(outputs) if change_expected is None else len(outputs) + 1
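+    # Note (editor's gloss, inferred from the surrounding assertions): the +1 accounts for the change coin when change is expected.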
+ expected_additions = len(outputs) + 1 if change_expected else len(outputs)
if is_cat and amount_fee:
expected_additions += 1
assert len(tx.additions) == expected_additions
@@ -224,6 +226,18 @@ async def assert_push_tx_error(node_rpc: FullNodeRpcClient, tx: TransactionRecor
raise ValueError from error
+async def assert_get_balance(rpc_client: WalletRpcClient, wallet_node: WalletNode, wallet: WalletProtocol) -> None:
+ expected_balance = await wallet_node.get_balance(wallet.id())
+ expected_balance_dict = expected_balance.to_json_dict()
+ expected_balance_dict["wallet_id"] = wallet.id()
+ expected_balance_dict["wallet_type"] = wallet.type()
+ expected_balance_dict["fingerprint"] = wallet_node.logged_in_fingerprint
+ if wallet.type() == WalletType.CAT:
+ assert isinstance(wallet, CATWallet)
+ expected_balance_dict["asset_id"] = wallet.get_asset_id()
+ assert await rpc_client.get_wallet_balance(wallet.id()) == expected_balance_dict
+
+
async def tx_in_mempool(client: WalletRpcClient, transaction_id: bytes32):
tx = await client.get_transaction(1, transaction_id)
return tx.is_in_mempool()
@@ -310,6 +324,71 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen
assert tx.confirmed
+@pytest.mark.asyncio
+async def test_get_balance(wallet_rpc_environment: WalletRpcTestEnvironment):
+ env = wallet_rpc_environment
+ wallet: Wallet = env.wallet_1.wallet
+ wallet_node: WalletNode = env.wallet_1.node
+ full_node_api: FullNodeSimulator = env.full_node.api
+ wallet_rpc_client = env.wallet_1.rpc_client
+ await full_node_api.farm_blocks_to_wallet(2, wallet)
+ async with wallet_node.wallet_state_manager.lock:
+ cat_wallet: CATWallet = await CATWallet.create_new_cat_wallet(
+ wallet_node.wallet_state_manager, wallet, {"identifier": "genesis_by_id"}, uint64(100)
+ )
+ await assert_get_balance(wallet_rpc_client, wallet_node, wallet)
+ await assert_get_balance(wallet_rpc_client, wallet_node, cat_wallet)
+
+
+@pytest.mark.asyncio
+async def test_get_farmed_amount(wallet_rpc_environment: WalletRpcTestEnvironment):
+ env = wallet_rpc_environment
+ wallet: Wallet = env.wallet_1.wallet
+ full_node_api: FullNodeSimulator = env.full_node.api
+ wallet_rpc_client = env.wallet_1.rpc_client
+ await full_node_api.farm_blocks_to_wallet(2, wallet)
+
+ result = await wallet_rpc_client.get_farmed_amount()
+
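+    # Two blocks were farmed to the wallet; each block pays 0.25 XCH to the farmer and 1.75 XCH to the pool, hence the 4 XCH total below (amounts in mojos).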
+ expected_result = {
+ "farmed_amount": 4_000_000_000_000,
+ "farmer_reward_amount": 500_000_000_000,
+ "fee_amount": 0,
+ "last_height_farmed": 2,
+ "pool_reward_amount": 3_500_000_000_000,
+ "success": True,
+ }
+
+ assert result == expected_result
+
+
+@pytest.mark.asyncio
+async def test_get_farmed_amount_with_fee(wallet_rpc_environment: WalletRpcTestEnvironment):
+ env = wallet_rpc_environment
+ wallet: Wallet = env.wallet_1.wallet
+ full_node_api: FullNodeSimulator = env.full_node.api
+ wallet_rpc_client = env.wallet_1.rpc_client
+ wallet_node: WalletNode = env.wallet_1.node
+
+ await generate_funds(full_node_api, env.wallet_1)
+
+ fee_amount = 100
+ tx = await wallet.generate_signed_transaction(
+ amount=uint64(5),
+ puzzle_hash=bytes32([0] * 32),
+ fee=uint64(fee_amount),
+ )
+ await wallet.push_transaction(tx)
+
+ our_ph = await wallet.get_new_puzzlehash()
+ await full_node_api.wait_transaction_records_entered_mempool(records=[tx])
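+    # Farm to our own puzzle hash so the block that includes the transaction credits its fee back to this wallet's farmed amount.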
+ await full_node_api.farm_blocks_to_puzzlehash(count=2, farm_to=our_ph, guarantee_transaction_blocks=True)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20)
+
+ result = await wallet_rpc_client.get_farmed_amount()
+ assert result["fee_amount"] == fee_amount
+
+
@pytest.mark.asyncio
async def test_get_timestamp_for_height(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
@@ -336,6 +415,7 @@ async def test_get_timestamp_for_height(wallet_rpc_environment: WalletRpcTestEnv
False,
),
([(1337, ["LEET"]), (81000, ["pingwei"])], 817, False, True),
+ ([(120000000000, None), (120000000000, None)], 10000000000, True, False),
],
)
@pytest.mark.asyncio
@@ -390,7 +470,8 @@ async def test_create_signed_transaction(
fee=amount_fee,
wallet_id=wallet_id,
)
- assert_tx_amounts(tx, outputs, amount_fee=amount_fee, change_expected=not select_coin, is_cat=is_cat)
+ change_expected = not selected_coin or selected_coin[0].amount - amount_total > 0
+ assert_tx_amounts(tx, outputs, amount_fee=amount_fee, change_expected=change_expected, is_cat=is_cat)
     # Farm the transaction and make sure the wallet balance reflects it correctly
spend_bundle = tx.spend_bundle
@@ -445,7 +526,7 @@ async def test_create_signed_transaction_with_coin_announcement(wallet_rpc_envir
tx_res: TransactionRecord = await client.create_signed_transaction(
outputs, coin_announcements=tx_coin_announcements
)
- assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=False)
+ assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=True)
await assert_push_tx_error(client_node, tx_res)
@@ -609,6 +690,13 @@ async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment
assert len(tx_for_address) == 1
assert tx_for_address[0].to_puzzle_hash == ph_by_addr
+ # Test type filter
+ all_transactions = await client.get_transactions(
+ 1, type_filter=TransactionTypeFilter.include([TransactionType.COINBASE_REWARD])
+ )
+ assert len(all_transactions) == 5
+ assert all(transaction.type == TransactionType.COINBASE_REWARD for transaction in all_transactions)
+
@pytest.mark.asyncio
async def test_get_transaction_count(wallet_rpc_environment: WalletRpcTestEnvironment):
diff --git a/tests/wallet/sync/test_wallet_sync.py b/tests/wallet/sync/test_wallet_sync.py
--- a/tests/wallet/sync/test_wallet_sync.py
+++ b/tests/wallet/sync/test_wallet_sync.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import asyncio
+import functools
import logging
from typing import List, Optional, Set
from unittest.mock import MagicMock
@@ -11,7 +12,6 @@
from chia.consensus.block_record import BlockRecord
from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
-from chia.full_node.mempool_manager import MempoolManager
from chia.full_node.weight_proof import WeightProofHandler
from chia.protocols import full_node_protocol, wallet_protocol
from chia.protocols.protocol_message_types import ProtocolMessageTypes
@@ -28,10 +28,10 @@
from chia.util.hash import std_hash
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.nft_wallet.nft_wallet import NFTWallet
+from chia.wallet.payment import Payment
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.wallet_sync_utils import PeerRequestException
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash
from chia.wallet.wallet_coin_record import WalletCoinRecord
from chia.wallet.wallet_weight_proof_handler import get_wp_fork_point
from tests.connection_utils import disconnect_all, disconnect_all_and_reconnect
@@ -508,11 +508,11 @@ async def test_request_additions_success(self, simulator_and_wallet, self_hostna
await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20)
- payees: List[AmountWithPuzzlehash] = []
+ payees: List[Payment] = []
for i in range(10):
payee_ph = await wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(i + 100), "puzzlehash": payee_ph, "memos": []})
- payees.append({"amount": uint64(i + 200), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(i + 100)))
+ payees.append(Payment(payee_ph, uint64(i + 200)))
tx: TransactionRecord = await wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
await full_node_api.send_transaction(SendTransaction(tx.spend_bundle))
@@ -527,7 +527,7 @@ async def test_request_additions_success(self, simulator_and_wallet, self_hostna
RequestAdditions(
last_block.height,
None,
- [payees[0]["puzzlehash"], payees[2]["puzzlehash"], std_hash(b"1")],
+ [payees[0].puzzle_hash, payees[2].puzzle_hash, std_hash(b"1")],
)
)
@@ -539,7 +539,7 @@ async def test_request_additions_success(self, simulator_and_wallet, self_hostna
# First two PHs are included
for i in range(2):
- assert response.proofs[i][0] in {payees[j]["puzzlehash"] for j in (0, 2)}
+ assert response.proofs[i][0] in {payees[j].puzzle_hash for j in (0, 2)}
assert response.proofs[i][1] is not None
assert response.proofs[i][2] is not None
@@ -718,9 +718,9 @@ async def test_dusted_wallet(
await full_node_api.wait_for_wallets_synced(wallet_nodes=[farm_wallet_node, dust_wallet_node], timeout=20)
# Part 1: create a single dust coin
- payees: List[AmountWithPuzzlehash] = []
+ payees: List[Payment] = []
payee_ph = await dust_wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(dust_value), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(dust_value)))
# construct and send tx
tx: TransactionRecord = await farm_wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
@@ -769,7 +769,7 @@ async def test_dusted_wallet(
# Part 2: Create dust coins until the filter threshold has been reached.
# Nothing should be filtered yet (unless spam_filter_after_n_txs is 0).
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
# Determine how much dust to create, recalling that there already is one dust coin.
new_dust = spam_filter_after_n_txs - 1
@@ -777,7 +777,7 @@ async def test_dusted_wallet(
while dust_remaining > 0:
payee_ph = await dust_wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(dust_value), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(dust_value)))
# After every 100 (at most) coins added, push the tx and advance the chain
# This greatly speeds up the overall process
@@ -789,7 +789,7 @@ async def test_dusted_wallet(
last_block: Optional[BlockRecord] = full_node_api.full_node.blockchain.get_peak()
assert last_block is not None
# reset payees
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
dust_remaining -= 1
@@ -836,11 +836,11 @@ async def test_dusted_wallet(
# These should not get filtered.
large_coins = 10
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
for i in range(large_coins):
payee_ph = await dust_wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(xch_spam_amount), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(xch_spam_amount)))
# construct and send tx
tx: TransactionRecord = await farm_wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
@@ -876,10 +876,10 @@ async def test_dusted_wallet(
assert num_coins == dust_coins + large_coins + large_dust_coins
# Part 4: Create one more dust coin to test the threshold
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
payee_ph = await dust_wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(dust_value), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(dust_value)))
# construct and send tx
tx: TransactionRecord = await farm_wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
@@ -917,13 +917,13 @@ async def test_dusted_wallet(
# Part 5: Create 5 coins below the threshold and 5 at or above.
# Those below the threshold should get filtered, and those above should not.
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
for i in range(5):
payee_ph = await dust_wallet.get_new_puzzlehash()
# Create a large coin and add on the appropriate balance.
- payees.append({"amount": uint64(xch_spam_amount + i), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(xch_spam_amount + i)))
large_coins += 1
large_coin_balance += xch_spam_amount + i
@@ -931,9 +931,9 @@ async def test_dusted_wallet(
# Make sure we are always creating coins with a positive value.
if xch_spam_amount - dust_value - i > 0:
- payees.append({"amount": uint64(xch_spam_amount - dust_value - i), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(xch_spam_amount - dust_value - i)))
else:
- payees.append({"amount": uint64(dust_value), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(dust_value)))
# In cases where xch_spam_amount is sufficiently low,
             # the new dust should be considered a large coin and not be filtered.
if xch_spam_amount <= dust_value:
@@ -974,7 +974,7 @@ async def test_dusted_wallet(
# Send 1 mojo from the dust wallet. The dust wallet should receive a change coin valued at "xch_spam_amount-1".
payee_ph = await farm_wallet.get_new_puzzlehash()
- payees: List[AmountWithPuzzlehash] = [{"amount": uint64(balance), "puzzlehash": payee_ph, "memos": []}]
+ payees = [Payment(payee_ph, uint64(balance))]
# construct and send tx
tx: TransactionRecord = await dust_wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
@@ -1014,7 +1014,7 @@ async def test_dusted_wallet(
while coins_remaining > 0:
payee_ph = await dust_wallet.get_new_puzzlehash()
- payees.append({"amount": uint64(coin_value), "puzzlehash": payee_ph, "memos": []})
+ payees.append(Payment(payee_ph, uint64(coin_value)))
# After every 100 (at most) coins added, push the tx and advance the chain
# This greatly speeds up the overall process
@@ -1026,7 +1026,7 @@ async def test_dusted_wallet(
last_block: Optional[BlockRecord] = full_node_api.full_node.blockchain.get_peak()
assert last_block is not None
# reset payees
- payees: List[AmountWithPuzzlehash] = []
+ payees = []
coins_remaining -= 1
@@ -1057,7 +1057,7 @@ async def test_dusted_wallet(
# Send a 1 mojo coin from the dust wallet to the farm wallet
payee_ph = await farm_wallet.get_new_puzzlehash()
- payees: List[AmountWithPuzzlehash] = [{"amount": uint64(1), "puzzlehash": payee_ph, "memos": []}]
+ payees = [Payment(payee_ph, uint64(1))]
# construct and send tx
tx: TransactionRecord = await dust_wallet.generate_signed_transaction(uint64(0), ph, primaries=payees)
@@ -1185,11 +1185,17 @@ async def new_func(*args, **kwargs):
return new_func
- def flaky_fetch_puzzle_solution(node, func):
+ request_puzzle_solution_failure_tested = False
+
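+    # Wrap the full node's request_puzzle_solution handler so the first call returns a rejection, exercising the wallet's coin-state retry path.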
+ def flaky_request_puzzle_solution(func):
+ @functools.wraps(func)
async def new_func(*args, **kwargs):
- if node.puzzle_solution_flaky:
- node.puzzle_solution_flaky = False
- raise PeerRequestException()
+ nonlocal request_puzzle_solution_failure_tested
+ if not request_puzzle_solution_failure_tested:
+ request_puzzle_solution_failure_tested = True
+ # This can just return None if we have `none_response` enabled.
+ reject = wallet_protocol.RejectPuzzleSolution(bytes32([0] * 32), uint32(0))
+ return make_msg(ProtocolMessageTypes.reject_puzzle_solution, reject)
else:
return await func(*args, **kwargs)
@@ -1225,23 +1231,25 @@ async def new_func(*args, **kwargs):
return new_func
+ full_node_api.request_puzzle_solution = flaky_request_puzzle_solution(full_node_api.request_puzzle_solution)
+
for wallet_node, wallet_server in wallets:
+ wallet_node.coin_state_retry_seconds = 1
+ request_puzzle_solution_failure_tested = False
wallet_node.coin_state_flaky = True
- wallet_node.puzzle_solution_flaky = True
wallet_node.fetch_children_flaky = True
wallet_node.get_timestamp_flaky = True
wallet_node.db_flaky = True
wallet_node.get_coin_state = flaky_get_coin_state(wallet_node, wallet_node.get_coin_state)
- wallet_node.fetch_puzzle_solution = flaky_fetch_puzzle_solution(
- wallet_node, wallet_node.fetch_puzzle_solution
- )
wallet_node.fetch_children = flaky_fetch_children(wallet_node, wallet_node.fetch_children)
wallet_node.get_timestamp_for_height = flaky_get_timestamp(
wallet_node, wallet_node.get_timestamp_for_height
)
- wallet_node.wallet_state_manager.puzzle_store.wallet_info_for_puzzle_hash = flaky_info_for_puzhash(
- wallet_node, wallet_node.wallet_state_manager.puzzle_store.wallet_info_for_puzzle_hash
+ wallet_node.wallet_state_manager.puzzle_store.get_wallet_identifier_for_puzzle_hash = (
+ flaky_info_for_puzhash(
+ wallet_node, wallet_node.wallet_state_manager.puzzle_store.get_wallet_identifier_for_puzzle_hash
+ )
)
await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
@@ -1251,38 +1259,32 @@ async def new_func(*args, **kwargs):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
- async def len_gt_0(func, *args):
- return len((await func(*args))) > 0
+ async def retry_store_empty() -> bool:
+ return len(await wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry()) == 0
- await time_out_assert(
- 15, len_gt_0, True, wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry
- )
- await time_out_assert(
- 30, len_gt_0, False, wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry
- )
+ async def assert_coin_state_retry() -> None:
+ # Wait for retry coin states to show up
+ await time_out_assert(15, retry_store_empty, False)
+ # And become retried/removed
+ await time_out_assert(30, retry_store_empty, True)
+
+ await assert_coin_state_retry()
await time_out_assert(30, wallet.get_confirmed_balance, 2_000_000_000_000)
tx = await wallet.generate_signed_transaction(1_000_000_000_000, bytes32([0] * 32), memos=[ph])
await wallet_node.wallet_state_manager.add_pending_transaction(tx)
- async def tx_in_pool(mempool: MempoolManager, tx_id: bytes32):
- tx = mempool.get_spendbundle(tx_id)
- if tx is None:
- return False
- return True
+ async def tx_in_mempool():
+ return full_node_api.full_node.mempool_manager.get_spendbundle(tx.name) is not None
- await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name)
+ await time_out_assert(15, tx_in_mempool)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
- await time_out_assert(
- 15, len_gt_0, True, wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry
- )
- await time_out_assert(
- 120, len_gt_0, False, wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry
- )
+ await assert_coin_state_retry()
+
assert not wallet_node.coin_state_flaky
- assert not wallet_node.puzzle_solution_flaky
+ assert request_puzzle_solution_failure_tested
assert not wallet_node.fetch_children_flaky
assert not wallet_node.get_timestamp_flaky
assert not wallet_node.db_flaky
@@ -1367,6 +1369,9 @@ def wallet_syncing() -> bool:
def check_sync_canceled() -> bool:
return sync_canceled
+ def synced_to_trusted() -> bool:
+ return trusted_full_node_server.node_id in wallet_node.synced_peers
+
def only_trusted_peer() -> bool:
trusted_peers = sum([wallet_node.is_trusted(peer) for peer in wallet_server.all_connections.values()])
untrusted_peers = sum([not wallet_node.is_trusted(peer) for peer in wallet_server.all_connections.values()])
@@ -1388,7 +1393,7 @@ def only_trusted_peer() -> bool:
# Connect to the trusted peer and make sure the running untrusted long sync gets interrupted via disconnect
await wallet_server.start_client(PeerInfo(self_hostname, uint16(trusted_full_node_server._port)), None)
await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1)
- assert trusted_full_node_server.node_id in wallet_node.synced_peers
+    await time_out_assert(10, synced_to_trusted)
assert untrusted_full_node_server.node_id not in wallet_node.synced_peers
assert "Connected to a a synced trusted peer, disconnecting from all untrusted nodes." in caplog.text
diff --git a/tests/wallet/test_nft_store.py b/tests/wallet/test_nft_store.py
--- a/tests/wallet/test_nft_store.py
+++ b/tests/wallet/test_nft_store.py
@@ -1,5 +1,9 @@
from __future__ import annotations
+from dataclasses import dataclass, field
+from secrets import token_bytes
+from typing import Dict, List
+
import pytest
from chia.types.blockchain_format.coin import Coin
@@ -12,6 +16,26 @@
from tests.util.db_connection import DBConnection
+def get_dummy_nft() -> NFTCoinInfo:
+ return NFTCoinInfo(
+ bytes32(token_bytes(32)),
+ Coin(bytes32(token_bytes(32)), bytes32(token_bytes(32)), uint64(1)),
+ LineageProof(bytes32(token_bytes(32)), bytes32(token_bytes(32)), uint64(1)),
+ Program.to(["A Test puzzle"]),
+ uint32(1),
+ )
+
+
+@dataclass
+class DummyNFTs:
+ nfts_per_wallet: Dict[uint32, List[NFTCoinInfo]] = field(default_factory=dict)
+
+ def generate(self, wallet_id: int, count: int) -> None:
+ nfts = self.nfts_per_wallet.setdefault(uint32(wallet_id), [])
+ for _ in range(count):
+ nfts.append(get_dummy_nft())
+
+
class TestNftStore:
@pytest.mark.asyncio
async def test_nft_insert(self) -> None:
@@ -144,3 +168,25 @@ async def test_nft_reorg(self) -> None:
await db.rollback_to_block(-1)
assert await db.count(wallet_id=uint32(1)) == 0
assert await db.is_empty(wallet_id=uint32(1))
+
+
+@pytest.mark.asyncio
+async def test_delete_wallet() -> None:
+ dummy_nfts = DummyNFTs()
+ for i in range(5):
+ dummy_nfts.generate(i, i * 5)
+ async with DBConnection(1) as wrapper:
+ db = await WalletNftStore.create(wrapper)
+ # Add the nfts per wallet and verify them
+ for wallet_id, nfts in dummy_nfts.nfts_per_wallet.items():
+ for nft in nfts:
+ await db.save_nft(wallet_id, None, nft)
+ assert await db.count(wallet_id) == len(nfts)
+ # Remove one wallet after the other and verify before and after each
+ for wallet_id, nfts in dummy_nfts.nfts_per_wallet.items():
+ # Assert the length again here to make sure the previous removals did not affect other wallet_ids
+ assert await db.count(wallet_id) == len(nfts)
+ await db.delete_wallet(wallet_id)
+ assert await db.count(wallet_id) == 0
+
+ assert await db.is_empty()
diff --git a/tests/wallet/test_offer_parsing_performance.py b/tests/wallet/test_offer_parsing_performance.py
--- a/tests/wallet/test_offer_parsing_performance.py
+++ b/tests/wallet/test_offer_parsing_performance.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import cProfile
+import sys
from contextlib import contextmanager
from typing import Iterator
@@ -17,6 +18,9 @@
@contextmanager
def enable_profiler(name: str) -> Iterator[None]:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
if not with_profile:
yield
return
diff --git a/tests/wallet/test_puzzle_store.py b/tests/wallet/test_puzzle_store.py
--- a/tests/wallet/test_puzzle_store.py
+++ b/tests/wallet/test_puzzle_store.py
@@ -1,26 +1,50 @@
from __future__ import annotations
+from dataclasses import dataclass, field
from secrets import token_bytes
+from typing import Dict, List
import pytest
from blspy import AugSchemeMPL
+from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32
from chia.wallet.derivation_record import DerivationRecord
-from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.util.wallet_types import WalletIdentifier, WalletType
from chia.wallet.wallet_puzzle_store import WalletPuzzleStore
from tests.util.db_connection import DBConnection
+def get_dummy_record(index: int, wallet_id: int) -> DerivationRecord:
+ return DerivationRecord(
+ uint32(index),
+ bytes32(token_bytes(32)),
+ AugSchemeMPL.key_gen(token_bytes(32)).get_g1(),
+ WalletType.STANDARD_WALLET,
+ uint32(wallet_id),
+ False,
+ )
+
+
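+# Collects generated derivation records per wallet id while tracking the next derivation index for each wallet.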
+@dataclass
+class DummyDerivationRecords:
+ index_per_wallet: Dict[int, int] = field(default_factory=dict)
+ records_per_wallet: Dict[int, List[DerivationRecord]] = field(default_factory=dict)
+
+ def generate(self, wallet_id: int, count: int) -> None:
+ records = self.records_per_wallet.setdefault(wallet_id, [])
+ self.index_per_wallet.setdefault(wallet_id, 0)
+ for _ in range(count):
+ records.append(get_dummy_record(self.index_per_wallet[wallet_id], wallet_id))
+ self.index_per_wallet[wallet_id] += 1
+
+
class TestPuzzleStore:
@pytest.mark.asyncio
async def test_puzzle_store(self):
async with DBConnection(1) as wrapper:
db = await WalletPuzzleStore.create(wrapper)
derivation_recs = []
- # wallet_types = [t for t in WalletType]
- [t for t in WalletType]
-
for i in range(1000):
derivation_recs.append(
DerivationRecord(
@@ -45,7 +69,7 @@ async def test_puzzle_store(self):
assert await db.puzzle_hash_exists(derivation_recs[0].puzzle_hash) is False
assert await db.index_for_pubkey(derivation_recs[0].pubkey) is None
assert await db.index_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
- assert await db.wallet_info_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
+ assert await db.get_wallet_identifier_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
assert len((await db.get_all_puzzle_hashes())) == 0
assert await db.get_last_derivation_path() is None
assert await db.get_unused_derivation_path() is None
@@ -57,7 +81,7 @@ async def test_puzzle_store(self):
assert await db.index_for_pubkey(derivation_recs[4].pubkey) == 2
assert await db.index_for_puzzle_hash(derivation_recs[2].puzzle_hash) == 1
- assert await db.wallet_info_for_puzzle_hash(derivation_recs[2].puzzle_hash) == (
+ assert await db.get_wallet_identifier_for_puzzle_hash(derivation_recs[2].puzzle_hash) == WalletIdentifier(
derivation_recs[2].wallet_id,
derivation_recs[2].wallet_type,
)
@@ -70,3 +94,38 @@ async def test_puzzle_store(self):
await db.set_used_up_to(249)
assert await db.get_unused_derivation_path() == 250
+
+
+@pytest.mark.asyncio
+async def test_delete_wallet() -> None:
+ dummy_records = DummyDerivationRecords()
+ for i in range(5):
+ dummy_records.generate(i, i * 5)
+ async with DBConnection(1) as wrapper:
+ db = await WalletPuzzleStore.create(wrapper)
+ # Add the records per wallet and verify them
+ for wallet_id, records in dummy_records.records_per_wallet.items():
+ await db.add_derivation_paths(records)
+ for record in records:
+ assert await db.get_derivation_record(record.index, record.wallet_id, record.hardened) == record
+ assert await db.get_wallet_identifier_for_puzzle_hash(record.puzzle_hash) == WalletIdentifier(
+ record.wallet_id, record.wallet_type
+ )
+ # Remove one wallet after the other and verify before and after each
+ for wallet_id, records in dummy_records.records_per_wallet.items():
+ # Assert the existence again here to make sure the previous removals did not affect other wallet_ids
+ for record in records:
+ assert await db.get_derivation_record(record.index, record.wallet_id, record.hardened) == record
+ assert await db.get_wallet_identifier_for_puzzle_hash(record.puzzle_hash) == WalletIdentifier(
+ record.wallet_id, record.wallet_type
+ )
+ assert await db.get_last_derivation_path_for_wallet(wallet_id) is not None
+ # Remove the wallet_id and make sure it is removed fully
+ await db.delete_wallet(wallet_id)
+ for record in records:
+ assert await db.get_derivation_record(record.index, record.wallet_id, record.hardened) is None
+ assert await db.get_wallet_identifier_for_puzzle_hash(record.puzzle_hash) is None
+ assert await db.get_last_derivation_path_for_wallet(wallet_id) is None
+ assert await db.get_last_derivation_path() is None
+ assert db.last_derivation_index is None
+ assert len(db.last_wallet_derivation_index) == 0
diff --git a/tests/wallet/test_singleton.py b/tests/wallet/test_singleton.py
--- a/tests/wallet/test_singleton.py
+++ b/tests/wallet/test_singleton.py
@@ -8,11 +8,11 @@
from chia.util.condition_tools import parse_sexp_to_conditions
from chia.wallet.puzzles.load_clvm import load_clvm
-SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
-LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
-P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm")
-POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
-POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
+SINGLETON_MOD = load_clvm("singleton_top_layer.clsp")
+LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clsp")
+P2_SINGLETON_MOD = load_clvm("p2_singleton.clsp")
+POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clsp")
+POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clsp")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
@@ -118,8 +118,7 @@ def test_p2_singleton():
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
cost, result = p2_singleton_full.run_with_cost(INFINITE_COST, solution)
- err, conditions = parse_sexp_to_conditions(result)
- assert err is None
+ conditions = parse_sexp_to_conditions(result)
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
diff --git a/tests/wallet/test_singleton_lifecycle.py b/tests/wallet/test_singleton_lifecycle.py
--- a/tests/wallet/test_singleton_lifecycle.py
+++ b/tests/wallet/test_singleton_lifecycle.py
@@ -17,11 +17,11 @@
from chia.wallet.puzzles.load_clvm import load_clvm
from tests.core.full_node.test_conditions import check_spend_bundle_validity, initial_blocks
-SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
-LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
-P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm")
-POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
-POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
+SINGLETON_MOD = load_clvm("singleton_top_layer.clsp")
+LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clsp")
+P2_SINGLETON_MOD = load_clvm("p2_singleton.clsp")
+POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clsp")
+POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clsp")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
diff --git a/tests/wallet/test_singleton_lifecycle_fast.py b/tests/wallet/test_singleton_lifecycle_fast.py
--- a/tests/wallet/test_singleton_lifecycle_fast.py
+++ b/tests/wallet/test_singleton_lifecycle_fast.py
@@ -18,11 +18,11 @@
from chia.wallet.puzzles.load_clvm import load_clvm
from tests.clvm.coin_store import BadSpendBundleError, CoinStore, CoinTimestamp
-SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
-LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
-P2_SINGLETON_MOD = load_clvm("p2_singleton_or_delayed_puzhash.clvm")
-POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
-POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
+SINGLETON_MOD = load_clvm("singleton_top_layer.clsp")
+LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clsp")
+P2_SINGLETON_MOD = load_clvm("p2_singleton_or_delayed_puzhash.clsp")
+POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clsp")
+POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clsp")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
diff --git a/tests/wallet/test_transaction_store.py b/tests/wallet/test_transaction_store.py
--- a/tests/wallet/test_transaction_store.py
+++ b/tests/wallet/test_transaction_store.py
@@ -2,7 +2,7 @@
import dataclasses
from secrets import token_bytes
-from typing import Any, List
+from typing import Any, List, Optional, Tuple
import pytest
@@ -11,7 +11,8 @@
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.util.errors import Err
from chia.util.ints import uint8, uint32, uint64
-from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.transaction_record import TransactionRecord, minimum_send_attempts
+from chia.wallet.util.query_filter import TransactionTypeFilter
from chia.wallet.util.transaction_type import TransactionType
from chia.wallet.wallet_transaction_store import WalletTransactionStore, filter_ok_mempool_status
from tests.util.db_connection import DBConnection
@@ -446,6 +447,9 @@ async def test_get_transactions_between_confirmed() -> None:
tr3 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(2))
tr4 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(3))
tr5 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(4))
+ tr6 = dataclasses.replace(
+ tr1, name=token_bytes(32), confirmed_at_height=uint32(5), type=uint32(TransactionType.COINBASE_REWARD.value)
+ )
await store.add_transaction_record(tr1)
await store.add_transaction_record(tr2)
@@ -480,6 +484,36 @@ async def test_get_transactions_between_confirmed() -> None:
assert await store.get_transactions_between(1, 2, 100, reverse=True) == [tr3, tr2, tr1]
assert await store.get_transactions_between(1, 3, 100, reverse=True) == [tr2, tr1]
+ # test type filter (coinbase reward)
+ await store.add_transaction_record(tr6)
+ assert await store.get_transactions_between(
+ 1, 0, 1, reverse=True, type_filter=TransactionTypeFilter.include([TransactionType.COINBASE_REWARD])
+ ) == [tr6]
+ assert await store.get_transactions_between(
+ 1, 0, 1, reverse=True, type_filter=TransactionTypeFilter.exclude([TransactionType.COINBASE_REWARD])
+ ) == [tr5]
+ assert (
+ await store.get_transactions_between(1, 0, 100, reverse=True, type_filter=TransactionTypeFilter.include([]))
+ == []
+ )
+ assert await store.get_transactions_between(
+ 1, 0, 100, reverse=True, type_filter=TransactionTypeFilter.exclude([])
+ ) == [
+ tr6,
+ tr5,
+ tr4,
+ tr3,
+ tr2,
+ tr1,
+ ]
+ assert await store.get_transactions_between(
+ 1,
+ 0,
+ 100,
+ reverse=True,
+ type_filter=TransactionTypeFilter.include([TransactionType.COINBASE_REWARD, TransactionType.OUTGOING_TX]),
+ ) == [tr6, tr5, tr4, tr3, tr2, tr1]
+
@pytest.mark.asyncio
async def test_get_transactions_between_relevance() -> None:
@@ -652,3 +686,26 @@ async def test_get_not_sent() -> None:
assert cmp(not_sent, [])
# TODO: also cover include_accepted_txs=True
+
+
+@pytest.mark.asyncio
+async def test_transaction_record_is_valid() -> None:
+ invalid_attempts: List[Tuple[str, uint8, Optional[str]]] = []
+ # The tx should be valid as long as we don't have minimum_send_attempts failed attempts
+ while len(invalid_attempts) < minimum_send_attempts:
+ assert dataclasses.replace(tr1, sent_to=invalid_attempts).is_valid()
+ invalid_attempts.append(("peer", uint8(MempoolInclusionStatus.FAILED), None))
+ # The tx should be invalid now with more than minimum failed attempts
+ assert len(invalid_attempts) == minimum_send_attempts
+ assert not dataclasses.replace(tr1, sent_to=invalid_attempts).is_valid()
+ mempool_success = ("success", uint8(MempoolInclusionStatus.SUCCESS), None)
+ low_fee = ("low_fee", uint8(MempoolInclusionStatus.FAILED), Err.INVALID_FEE_LOW_FEE.name)
+ close_to_zero = (
+ "close_to_zero",
+ uint8(MempoolInclusionStatus.FAILED),
+ Err.INVALID_FEE_TOO_CLOSE_TO_ZERO.name,
+ )
+ # But it should become valid with one of the above attempts
+ assert dataclasses.replace(tr1, sent_to=invalid_attempts + [mempool_success]).is_valid()
+ assert dataclasses.replace(tr1, sent_to=invalid_attempts + [low_fee]).is_valid()
+ assert dataclasses.replace(tr1, sent_to=invalid_attempts + [close_to_zero]).is_valid()
diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py
--- a/tests/wallet/test_wallet.py
+++ b/tests/wallet/test_wallet.py
@@ -22,10 +22,10 @@
from chia.util.bech32m import encode_puzzle_hash
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.derive_keys import master_sk_to_wallet_sk
+from chia.wallet.payment import Payment
from chia.wallet.transaction_record import TransactionRecord
from chia.wallet.util.compute_memos import compute_memos
from chia.wallet.util.transaction_type import TransactionType
-from chia.wallet.util.wallet_types import AmountWithPuzzlehash
from chia.wallet.wallet import CHIP_0002_SIGN_MESSAGE_PREFIX
from chia.wallet.wallet_node import WalletNode, get_wallet_db_path
from chia.wallet.wallet_state_manager import WalletStateManager
@@ -566,10 +566,7 @@ async def test_wallet_create_hit_max_send_amount(
await time_out_assert(20, wallet.get_confirmed_balance, expected_confirmed_balance)
- primaries: List[AmountWithPuzzlehash] = []
- for i in range(0, 60):
- primaries.append({"puzzlehash": ph, "amount": uint64(1000000000 + i), "memos": []})
-
+ primaries = [Payment(ph, uint64(1000000000 + i)) for i in range(60)]
tx_split_coins = await wallet.generate_signed_transaction(uint64(1), ph, uint64(0), primaries=primaries)
assert tx_split_coins.spend_bundle is not None
diff --git a/tests/wallet/test_wallet_blockchain.py b/tests/wallet/test_wallet_blockchain.py
--- a/tests/wallet/test_wallet_blockchain.py
+++ b/tests/wallet/test_wallet_blockchain.py
@@ -40,15 +40,11 @@ async def test_wallet_blockchain(self, simulator_and_wallet, default_1000_blocks
)
)
weight_proof: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res.data).wp
- success, _, records = await wallet_node._weight_proof_handler.validate_weight_proof(weight_proof, True)
+ records = await wallet_node._weight_proof_handler.validate_weight_proof(weight_proof, True)
weight_proof_short: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res_2.data).wp
- success, _, records_short = await wallet_node._weight_proof_handler.validate_weight_proof(
- weight_proof_short, True
- )
+ records_short = await wallet_node._weight_proof_handler.validate_weight_proof(weight_proof_short, True)
weight_proof_long: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res_3.data).wp
- success, _, records_long = await wallet_node._weight_proof_handler.validate_weight_proof(
- weight_proof_long, True
- )
+ records_long = await wallet_node._weight_proof_handler.validate_weight_proof(weight_proof_long, True)
async with DBConnection(1) as db_wrapper:
store = await KeyValStore.create(db_wrapper)
@@ -73,19 +69,19 @@ async def test_wallet_blockchain(self, simulator_and_wallet, default_1000_blocks
header_block = get_block_header(block, [], [])
header_blocks.append(header_block)
- res, err = await chain.receive_block(header_blocks[50])
+ res, err = await chain.add_block(header_blocks[50])
print(res, err)
assert res == AddBlockResult.DISCONNECTED_BLOCK
- res, err = await chain.receive_block(header_blocks[400])
+ res, err = await chain.add_block(header_blocks[400])
print(res, err)
assert res == AddBlockResult.ALREADY_HAVE_BLOCK
- res, err = await chain.receive_block(header_blocks[507])
+ res, err = await chain.add_block(header_blocks[507])
print(res, err)
assert res == AddBlockResult.DISCONNECTED_BLOCK
- res, err = await chain.receive_block(
+ res, err = await chain.add_block(
dataclasses.replace(header_blocks[506], challenge_chain_ip_proof=VDFProof(2, b"123", True))
)
assert res == AddBlockResult.INVALID_BLOCK
@@ -93,7 +89,7 @@ async def test_wallet_blockchain(self, simulator_and_wallet, default_1000_blocks
assert (await chain.get_peak_block()).height == 505
for block in header_blocks[506:]:
- res, err = await chain.receive_block(block)
+ res, err = await chain.add_block(block)
assert res == AddBlockResult.NEW_PEAK
assert (await chain.get_peak_block()).height == block.height
diff --git a/tests/wallet/test_wallet_coin_store.py b/tests/wallet/test_wallet_coin_store.py
--- a/tests/wallet/test_wallet_coin_store.py
+++ b/tests/wallet/test_wallet_coin_store.py
@@ -1,14 +1,18 @@
from __future__ import annotations
+from dataclasses import dataclass, field, replace
from secrets import token_bytes
+from typing import Dict, List, Optional, Tuple
import pytest
from chia.types.blockchain_format.coin import Coin
-from chia.util.ints import uint32, uint64
-from chia.wallet.util.wallet_types import WalletType
+from chia.util.ints import uint8, uint16, uint32, uint64
+from chia.util.misc import UInt32Range, UInt64Range, VersionedBlob
+from chia.wallet.util.query_filter import AmountFilter, HashFilter
+from chia.wallet.util.wallet_types import CoinType, WalletType
from chia.wallet.wallet_coin_record import WalletCoinRecord
-from chia.wallet.wallet_coin_store import WalletCoinStore
+from chia.wallet.wallet_coin_store import CoinRecordOrder, GetCoinRecords, GetCoinRecordsResult, WalletCoinStore
from tests.util.db_connection import DBConnection
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
@@ -18,6 +22,8 @@
coin_5 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_6 = Coin(token_bytes(32), coin_4.puzzle_hash, uint64(12312))
coin_7 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
+coin_8 = Coin(token_bytes(32), token_bytes(32), uint64(2))
+coin_9 = Coin(coin_5.name(), token_bytes(32), uint64(4))
record_replaced = WalletCoinRecord(coin_1, uint32(8), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_1 = WalletCoinRecord(coin_1, uint32(4), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_2 = WalletCoinRecord(coin_2, uint32(5), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
@@ -66,6 +72,94 @@
WalletType.POOLING_WALLET,
2,
)
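+# record_8 and record_9 are CLAWBACK-type coins carrying placeholder VersionedBlob metadata; they back the
+# coin_type filter and metadata parsing tests below.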
+record_8 = WalletCoinRecord(
+ coin_8,
+ uint32(1),
+ uint32(0),
+ False,
+ False,
+ WalletType.STANDARD_WALLET,
+ 1,
+ CoinType.CLAWBACK,
+ VersionedBlob(uint16(1), b"TEST"),
+)
+record_9 = WalletCoinRecord(
+ coin_9,
+ uint32(1),
+ uint32(2),
+ True,
+ False,
+ WalletType.STANDARD_WALLET,
+ 2,
+ CoinType.CLAWBACK,
+ VersionedBlob(uint16(1), b"TEST"),
+)
+
+
+def get_dummy_record(wallet_id: int) -> WalletCoinRecord:
+ return WalletCoinRecord(
+ Coin(token_bytes(32), token_bytes(32), uint64(12312)),
+ uint32(0),
+ uint32(0),
+ False,
+ False,
+ WalletType.STANDARD_WALLET,
+ wallet_id,
+ )
+
+
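+# Collects randomly generated coin records per wallet id; used by test_delete_wallet later in this module.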
+@dataclass
+class DummyWalletCoinRecords:
+ records_per_wallet: Dict[int, List[WalletCoinRecord]] = field(default_factory=dict)
+
+ def generate(self, wallet_id: int, count: int) -> None:
+ records = self.records_per_wallet.setdefault(wallet_id, [])
+ for _ in range(count):
+ records.append(get_dummy_record(wallet_id))
+
+
+@pytest.mark.parametrize(
+ "invalid_record, error",
+ [
+ (replace(record_8, metadata=None), "Can't parse None metadata"),
+ (replace(record_8, coin_type=CoinType.NORMAL), "Unknown metadata"),
+ ],
+)
+def test_wallet_coin_record_parsed_metadata_failures(invalid_record: WalletCoinRecord, error: str) -> None:
+ with pytest.raises(ValueError, match=error):
+ invalid_record.parsed_metadata()
+
+
+@pytest.mark.parametrize(
+ "coin_record, expected_metadata_type",
+ [
+ (record_8, VersionedBlob), # TODO: Replace with the proper clawback metadata type once it is introduced
+ ],
+)
+def test_wallet_coin_record_parsed_metadata(coin_record: WalletCoinRecord, expected_metadata_type: type) -> None:
+ assert type(coin_record.parsed_metadata()) == expected_metadata_type
+
+
+@pytest.mark.parametrize("coin_record", [record_1, record_2, record_8])
+def test_wallet_coin_record_json_parsed(coin_record: WalletCoinRecord) -> None:
+ expected_metadata = None
+ if coin_record.coin_type == CoinType.CLAWBACK:
+ assert coin_record.metadata is not None
+ # TODO: Parse the proper clawback metadata here once it is introduced
+ expected_metadata = coin_record.metadata.to_json_dict()
+
+ assert coin_record.to_json_dict_parsed_metadata() == {
+ "id": "0x" + coin_record.name().hex(),
+ "amount": coin_record.coin.amount,
+ "puzzle_hash": "0x" + coin_record.coin.puzzle_hash.hex(),
+ "parent_coin_info": "0x" + coin_record.coin.parent_coin_info.hex(),
+ "type": coin_record.coin_type,
+ "wallet_identifier": coin_record.wallet_identifier().to_json_dict(),
+ "confirmed_height": coin_record.confirmed_block_height,
+ "metadata": expected_metadata,
+ "spent_height": coin_record.spent_block_height,
+ "coinbase": coin_record.coinbase,
+ }
@pytest.mark.asyncio
@@ -95,20 +189,6 @@ async def test_persistance() -> None:
assert await store.get_coin_record(coin_1.name()) == record_1
-@pytest.mark.asyncio
-async def test_bulk_get() -> None:
- async with DBConnection(1) as db_wrapper:
- store = await WalletCoinStore.create(db_wrapper)
- await store.add_coin_record(record_1)
- await store.add_coin_record(record_2)
- await store.add_coin_record(record_3)
- await store.add_coin_record(record_4)
-
- store = await WalletCoinStore.create(db_wrapper)
- records = await store.get_coin_records([coin_1.name(), coin_2.name(), token_bytes(32), coin_4.name()])
- assert records == {coin_1.name(): record_1, coin_2.name(): record_2, coin_4.name(): record_4}
-
-
@pytest.mark.asyncio
async def test_set_spent() -> None:
async with DBConnection(1) as db_wrapper:
@@ -151,6 +231,7 @@ async def test_get_unspent_coins_for_wallet() -> None:
await store.add_coin_record(record_5) # wallet 1
await store.add_coin_record(record_6) # this is spent and wallet 2
await store.add_coin_record(record_7) # wallet 2
+ await store.add_coin_record(record_8)
assert await store.get_unspent_coins_for_wallet(1) == set([record_5])
assert await store.get_unspent_coins_for_wallet(2) == set([record_7])
@@ -174,6 +255,8 @@ async def test_get_unspent_coins_for_wallet() -> None:
assert await store.get_unspent_coins_for_wallet(2) == set()
assert await store.get_unspent_coins_for_wallet(3) == set()
+ assert await store.get_unspent_coins_for_wallet(1, coin_type=CoinType.CLAWBACK) == set([record_8])
+
@pytest.mark.asyncio
async def test_get_all_unspent_coins() -> None:
@@ -185,6 +268,7 @@ async def test_get_all_unspent_coins() -> None:
await store.add_coin_record(record_1) # not spent
await store.add_coin_record(record_2) # not spent
await store.add_coin_record(record_3) # spent
+ await store.add_coin_record(record_8) # not spent (clawback)
assert await store.get_all_unspent_coins() == set([record_1, record_2])
await store.add_coin_record(record_4) # spent
@@ -208,6 +292,8 @@ async def test_get_all_unspent_coins() -> None:
await store.set_spent(coin_1.name(), uint32(12))
assert await store.get_all_unspent_coins() == set()
+ assert await store.get_all_unspent_coins(coin_type=CoinType.CLAWBACK) == set([record_8])
+
@pytest.mark.asyncio
async def test_get_records_by_parent_id() -> None:
@@ -244,9 +330,9 @@ async def test_delete_coin_record() -> None:
await store.add_coin_record(record_6)
await store.add_coin_record(record_7)
- assert set(
- (
- await store.get_coin_records(
+ assert (
+ await store.get_coin_records(
+ coin_id_filter=HashFilter.include(
[
coin_1.name(),
coin_2.name(),
@@ -257,21 +343,491 @@ async def test_delete_coin_record() -> None:
coin_7.name(),
]
)
- ).values()
- ) == set([record_1, record_2, record_3, record_4, record_5, record_6, record_7])
+ )
+ ).records == [record_1, record_2, record_3, record_4, record_5, record_6, record_7]
assert await store.get_coin_record(coin_1.name()) == record_1
await store.delete_coin_record(coin_1.name())
assert await store.get_coin_record(coin_1.name()) is None
- assert set(
- (
- await store.get_coin_records(
+ assert (
+ await store.get_coin_records(
+ coin_id_filter=HashFilter.include(
[coin_2.name(), coin_3.name(), coin_4.name(), coin_5.name(), coin_6.name(), coin_7.name()]
)
- ).values()
- ) == set([record_2, record_3, record_4, record_5, record_6, record_7])
+ )
+ ).records == [record_2, record_3, record_4, record_5, record_6, record_7]
+
+
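+# Parameterization data for get_coin_records: each entry pairs a GetCoinRecords request with the result expected
+# from a store seeded with record_1 through record_9 (see run_get_coin_records_test below).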
+get_coin_records_offset_limit_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(offset=uint32(0), limit=uint32(0)), []),
+ (GetCoinRecords(offset=uint32(10), limit=uint32(0)), []),
+ (GetCoinRecords(offset=uint32(0), limit=uint32(1)), [record_8]),
+ (GetCoinRecords(offset=uint32(1), limit=uint32(1)), [record_9]),
+ (GetCoinRecords(offset=uint32(0), limit=uint32(2)), [record_8, record_9]),
+ (GetCoinRecords(offset=uint32(0), limit=uint32(5)), [record_8, record_9, record_1, record_2, record_3]),
+ (GetCoinRecords(coin_type=uint8(CoinType.CLAWBACK), offset=uint32(0), limit=uint32(5)), [record_8, record_9]),
+ (GetCoinRecords(offset=uint32(2), limit=uint32(5)), [record_1, record_2, record_3, record_4, record_5]),
+ (GetCoinRecords(coin_type=uint8(CoinType.CLAWBACK), offset=uint32(5), limit=uint32(1)), []),
+]
+
+get_coin_records_wallet_id_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (
+ GetCoinRecords(),
+ [record_8, record_9, record_1, record_2, record_3, record_4, record_5, record_6, record_7],
+ ),
+ (GetCoinRecords(wallet_id=uint32(0)), [record_1, record_2, record_3, record_4]),
+ (GetCoinRecords(wallet_id=uint32(1)), [record_8, record_5]),
+ (GetCoinRecords(wallet_id=uint32(2)), [record_9, record_6, record_7]),
+]
+
+get_coin_records_wallet_type_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(wallet_id=uint32(2), wallet_type=uint8(WalletType.STANDARD_WALLET)), [record_9, record_6]),
+ (GetCoinRecords(wallet_type=uint8(WalletType.POOLING_WALLET)), [record_7]),
+ (GetCoinRecords(wallet_type=uint8(WalletType.NFT)), []),
+]
+
+get_coin_records_coin_type_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(wallet_id=uint32(0), coin_type=uint8(CoinType.NORMAL)), [record_1, record_2, record_3, record_4]),
+ (GetCoinRecords(wallet_id=uint32(0), coin_type=uint8(CoinType.CLAWBACK)), []),
+ (GetCoinRecords(wallet_id=uint32(1), coin_type=uint8(CoinType.NORMAL)), [record_5]),
+ (GetCoinRecords(wallet_id=uint32(1), coin_type=uint8(CoinType.CLAWBACK)), [record_8]),
+ (GetCoinRecords(coin_type=uint8(CoinType.CLAWBACK)), [record_8, record_9]),
+]
+
+get_coin_records_coin_id_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(coin_id_filter=HashFilter.include([])), []),
+ (GetCoinRecords(coin_id_filter=HashFilter.include([coin_1.name(), coin_4.name()])), [record_1, record_4]),
+ (GetCoinRecords(coin_id_filter=HashFilter.include([coin_1.name(), coin_4.puzzle_hash])), [record_1]),
+ (GetCoinRecords(coin_id_filter=HashFilter.include([coin_9.name()])), [record_9]),
+ (GetCoinRecords(wallet_id=uint32(0), coin_id_filter=HashFilter.include([coin_9.name()])), []),
+ (
+ GetCoinRecords(wallet_id=uint32(0), coin_id_filter=HashFilter.exclude([coin_9.name()])),
+ [record_1, record_2, record_3, record_4],
+ ),
+]
+
+
+get_coin_records_puzzle_hash_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(puzzle_hash_filter=HashFilter.include([])), []),
+ (
+ GetCoinRecords(puzzle_hash_filter=HashFilter.include([coin_1.puzzle_hash, coin_4.puzzle_hash])),
+ [record_1, record_4, record_6],
+ ),
+ (GetCoinRecords(puzzle_hash_filter=HashFilter.include([coin_1.puzzle_hash, coin_4.name()])), [record_1]),
+ (GetCoinRecords(puzzle_hash_filter=HashFilter.include([coin_7.puzzle_hash])), [record_7]),
+ (
+ GetCoinRecords(
+ wallet_type=uint8(WalletType.STANDARD_WALLET), puzzle_hash_filter=HashFilter.include([coin_7.puzzle_hash])
+ ),
+ [],
+ ),
+ (
+ GetCoinRecords(
+ wallet_type=uint8(WalletType.STANDARD_WALLET),
+ puzzle_hash_filter=HashFilter.exclude([coin_7.puzzle_hash]),
+ ),
+ [record_8, record_9, record_1, record_2, record_3, record_4, record_5, record_6],
+ ),
+]
+
+get_coin_records_parent_coin_id_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(parent_coin_id_filter=HashFilter.include([])), []),
+ (
+ GetCoinRecords(parent_coin_id_filter=HashFilter.include([coin_5.name(), coin_4.parent_coin_info])),
+ [record_9, record_4],
+ ),
+ (GetCoinRecords(parent_coin_id_filter=HashFilter.include([coin_1.parent_coin_info])), [record_1, record_2]),
+ (GetCoinRecords(parent_coin_id_filter=HashFilter.include([coin_7.puzzle_hash])), []),
+ (
+ GetCoinRecords(
+ coin_type=uint8(CoinType.CLAWBACK),
+ parent_coin_id_filter=HashFilter.include([coin_5.name(), coin_4.parent_coin_info]),
+ ),
+ [record_9],
+ ),
+ (
+ GetCoinRecords(
+ coin_type=uint8(CoinType.CLAWBACK),
+ parent_coin_id_filter=HashFilter.exclude([coin_5.name(), coin_4.parent_coin_info]),
+ ),
+ [record_8],
+ ),
+]
+
+get_coin_records_amount_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(amount_filter=AmountFilter.include([])), []),
+ (
+ GetCoinRecords(amount_filter=AmountFilter.include([uint64(12312)])),
+ [record_1, record_3, record_4, record_5, record_6, record_7],
+ ),
+ (GetCoinRecords(amount_filter=AmountFilter.exclude([uint64(12312)])), [record_8, record_9, record_2]),
+ (GetCoinRecords(amount_filter=AmountFilter.include([uint64(2), uint64(4)])), [record_8, record_9]),
+ (
+ GetCoinRecords(amount_filter=AmountFilter.include([uint64(12311), uint64(2), uint64(4)])),
+ [record_8, record_9, record_2],
+ ),
+ (
+ GetCoinRecords(
+ coin_type=uint8(CoinType.CLAWBACK),
+ amount_filter=AmountFilter.include([uint64(12311), uint64(2), uint64(4)]),
+ ),
+ [record_8, record_9],
+ ),
+ (
+ GetCoinRecords(
+ coin_type=uint8(CoinType.CLAWBACK),
+ amount_filter=AmountFilter.exclude([uint64(12311), uint64(2), uint64(4)]),
+ ),
+ [],
+ ),
+]
+
+get_coin_records_amount_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(amount_range=UInt64Range(start=uint64(1000000))), []),
+ (GetCoinRecords(amount_range=UInt64Range(stop=uint64(0))), []),
+ (
+ GetCoinRecords(amount_range=UInt64Range(start=uint64(12312))),
+ [record_1, record_3, record_4, record_5, record_6, record_7],
+ ),
+ (GetCoinRecords(amount_range=UInt64Range(stop=uint64(4))), [record_8, record_9]),
+ (GetCoinRecords(amount_range=UInt64Range(start=uint64(2), stop=uint64(12311))), [record_8, record_9, record_2]),
+ (GetCoinRecords(amount_range=UInt64Range(start=uint64(4), stop=uint64(12311))), [record_9, record_2]),
+ (GetCoinRecords(amount_range=UInt64Range(start=uint64(5), stop=uint64(12311))), [record_2]),
+]
+
+get_coin_records_confirmed_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(confirmed_range=UInt32Range(start=uint32(20))), []),
+ (GetCoinRecords(confirmed_range=UInt32Range(stop=uint32(0))), []),
+ (GetCoinRecords(confirmed_range=UInt32Range(start=uint32(2), stop=uint32(1))), []),
+ (
+ GetCoinRecords(confirmed_range=UInt32Range(start=uint32(5))),
+ [record_2, record_3, record_4, record_5, record_6, record_7],
+ ),
+ (GetCoinRecords(confirmed_range=UInt32Range(stop=uint32(2))), [record_8, record_9]),
+ (GetCoinRecords(confirmed_range=UInt32Range(stop=uint32(4))), [record_8, record_9, record_1]),
+ (GetCoinRecords(confirmed_range=UInt32Range(start=uint32(4), stop=uint32(4))), [record_1]),
+ (
+ GetCoinRecords(confirmed_range=UInt32Range(start=uint32(4), stop=uint32(5))),
+ [record_1, record_2, record_3, record_4, record_5, record_6, record_7],
+ ),
+]
+
+get_coin_records_spent_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(spent_range=UInt32Range(start=uint32(20))), []),
+ (GetCoinRecords(spent_range=UInt32Range(stop=uint32(0))), [record_8, record_1, record_2, record_5, record_7]),
+ (GetCoinRecords(spent_range=UInt32Range(start=uint32(2), stop=uint32(1))), []),
+ (GetCoinRecords(spent_range=UInt32Range(start=uint32(5), stop=uint32(10))), [record_3]),
+ (GetCoinRecords(spent_range=UInt32Range(start=uint32(2), stop=uint32(10))), [record_9, record_3]),
+ (GetCoinRecords(spent_range=UInt32Range(start=uint32(5), stop=uint32(15))), [record_3, record_4, record_6]),
+]
+
+get_coin_records_order_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (
+ GetCoinRecords(wallet_id=uint32(0), order=uint8(CoinRecordOrder.spent_height)),
+ [record_1, record_2, record_3, record_4],
+ ),
+ (GetCoinRecords(wallet_id=uint32(1), order=uint8(CoinRecordOrder.spent_height)), [record_5, record_8]),
+ (
+ GetCoinRecords(
+ confirmed_range=UInt32Range(start=uint32(4), stop=uint32(5)), order=uint8(CoinRecordOrder.spent_height)
+ ),
+ [record_1, record_2, record_5, record_7, record_3, record_4, record_6],
+ ),
+]
+
+get_coin_records_reverse_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [
+ (
+ GetCoinRecords(wallet_id=uint32(0), order=uint8(CoinRecordOrder.spent_height), reverse=True),
+ [record_4, record_3, record_1, record_2],
+ ),
+ (
+ GetCoinRecords(wallet_id=uint32(1), order=uint8(CoinRecordOrder.spent_height), reverse=True),
+ [record_5, record_8],
+ ),
+ (
+ GetCoinRecords(confirmed_range=UInt32Range(start=uint32(1), stop=uint32(4)), reverse=True),
+ [record_1, record_8, record_9],
+ ),
+ (
+ GetCoinRecords(
+ confirmed_range=UInt32Range(start=uint32(4), stop=uint32(5)),
+ order=uint8(CoinRecordOrder.spent_height),
+ reverse=True,
+ ),
+ [record_4, record_6, record_3, record_1, record_2, record_5, record_7],
+ ),
+]
+
+get_coin_records_include_total_count_tests: List[Tuple[GetCoinRecords, int, List[WalletCoinRecord]]] = [
+ (GetCoinRecords(wallet_id=uint32(0), include_total_count=True), 4, [record_1, record_2, record_3, record_4]),
+ (
+ GetCoinRecords(wallet_id=uint32(0), offset=uint32(1), limit=uint32(2), include_total_count=True),
+ 4,
+ [record_2, record_3],
+ ),
+ (GetCoinRecords(wallet_id=uint32(1), include_total_count=True), 2, [record_8, record_5]),
+ (GetCoinRecords(wallet_type=uint8(WalletType.NFT), include_total_count=True), 0, []),
+ (GetCoinRecords(wallet_type=uint8(WalletType.POOLING_WALLET), include_total_count=True), 1, [record_7]),
+]
+
+get_coin_records_mixed_tests: List[Tuple[GetCoinRecords, int, List[WalletCoinRecord]]] = [
+ (
+ GetCoinRecords(
+ offset=uint32(2),
+ limit=uint32(2),
+ coin_id_filter=HashFilter.include([coin_1.name(), coin_5.name(), coin_8.name(), coin_9.name()]),
+ puzzle_hash_filter=HashFilter.exclude([coin_2.puzzle_hash]),
+ parent_coin_id_filter=HashFilter.exclude([coin_7.parent_coin_info]),
+ include_total_count=True,
+ ),
+ 4,
+ [record_1, record_5],
+ ),
+ (
+ GetCoinRecords(
+ offset=uint32(3),
+ limit=uint32(4),
+ wallet_type=uint8(WalletType.STANDARD_WALLET),
+ coin_type=uint8(CoinType.NORMAL),
+ puzzle_hash_filter=HashFilter.exclude([coin_2.puzzle_hash]),
+ parent_coin_id_filter=HashFilter.exclude([coin_7.parent_coin_info]),
+ include_total_count=True,
+ ),
+ 5,
+ [record_5, record_6],
+ ),
+ (
+ GetCoinRecords(
+ offset=uint32(1),
+ limit=uint32(2),
+ wallet_id=uint32(0),
+ wallet_type=uint8(WalletType.STANDARD_WALLET),
+ coin_type=uint8(CoinType.NORMAL),
+ coin_id_filter=HashFilter.exclude([coin_1.puzzle_hash]),
+ puzzle_hash_filter=HashFilter.include(
+ [coin_1.puzzle_hash, coin_2.puzzle_hash, coin_3.puzzle_hash, coin_4.puzzle_hash]
+ ),
+ parent_coin_id_filter=HashFilter.exclude([coin_7.parent_coin_info]),
+ amount_filter=AmountFilter.exclude([uint64(10)]),
+ amount_range=UInt64Range(start=uint64(20), stop=uint64(200000)),
+ confirmed_range=UInt32Range(start=uint32(2), stop=uint32(30)),
+ spent_range=UInt32Range(start=uint32(1), stop=uint32(15)),
+ order=uint8(CoinRecordOrder.spent_height),
+ reverse=True,
+ include_total_count=True,
+ ),
+ 2,
+ [record_3],
+ ),
+]
+
+
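+# Shared helper: seed a fresh store with all nine records, run the given request and compare the returned records,
+# the id-to-record mapping and the total count against the expected values.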
+async def run_get_coin_records_test(
+ request: GetCoinRecords, total_count: Optional[int], coin_records: List[WalletCoinRecord]
+) -> None:
+ async with DBConnection(1) as db_wrapper:
+ store = await WalletCoinStore.create(db_wrapper)
+
+ for record in [record_1, record_2, record_3, record_4, record_5, record_6, record_7, record_8, record_9]:
+ await store.add_coin_record(record)
+
+ result = await store.get_coin_records(
+ offset=request.offset,
+ limit=request.limit,
+ wallet_id=request.wallet_id,
+ wallet_type=None if request.wallet_type is None else WalletType(request.wallet_type),
+ coin_type=None if request.coin_type is None else CoinType(request.coin_type),
+ coin_id_filter=request.coin_id_filter,
+ puzzle_hash_filter=request.puzzle_hash_filter,
+ parent_coin_id_filter=request.parent_coin_id_filter,
+ amount_filter=request.amount_filter,
+ amount_range=request.amount_range,
+ confirmed_range=request.confirmed_range,
+ spent_range=request.spent_range,
+ order=CoinRecordOrder(request.order),
+ reverse=request.reverse,
+ include_total_count=request.include_total_count,
+ )
+
+ assert result.records == coin_records
+ assert result.coin_id_to_record == {coin.name(): coin for coin in coin_records}
+ assert result.total_count == total_count
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_offset_limit_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_offset_limit(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_wallet_id_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_wallet_id(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_wallet_type_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_wallet_type(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_coin_type_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_coin_type(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_coin_id_filter_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_coin_id_filter(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_puzzle_hash_filter_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_puzzle_hash_filter(
+ coins_request: GetCoinRecords, records: List[WalletCoinRecord]
+) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_parent_coin_id_filter_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_parent_coin_id_filter(
+ coins_request: GetCoinRecords, records: List[WalletCoinRecord]
+) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_amount_filter_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_amount_filter(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_confirmed_range_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_confirmed_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_spent_range_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_spent_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_amount_range_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_amount_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_order_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_order(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, records", [*get_coin_records_reverse_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_reverse(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None:
+ await run_get_coin_records_test(coins_request, None, records)
+
+
+@pytest.mark.parametrize("coins_request, total_count, records", [*get_coin_records_include_total_count_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_total_count(
+ coins_request: GetCoinRecords, total_count: int, records: List[WalletCoinRecord]
+) -> None:
+ await run_get_coin_records_test(coins_request, total_count, records)
+
+
+@pytest.mark.parametrize("coins_request, total_count, records", [*get_coin_records_mixed_tests])
+@pytest.mark.asyncio
+async def test_get_coin_records_mixed(
+ coins_request: GetCoinRecords, total_count: int, records: List[WalletCoinRecord]
+) -> None:
+ await run_get_coin_records_test(coins_request, total_count, records)
+
+
+@pytest.mark.asyncio
+async def test_get_coin_records_total_count_cache() -> None:
+ async with DBConnection(1) as db_wrapper:
+ store = await WalletCoinStore.create(db_wrapper)
+
+ for record in [record_1, record_2, record_3]:
+ await store.add_coin_record(record)
+
+ # Make sure the total count increases for the same query when adding more records
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 3
+ await store.add_coin_record(record_4)
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 4
+ # Make sure the total count increases for the same query when changing spent state
+ assert (
+ await store.get_coin_records(spent_range=UInt32Range(start=uint32(10)), include_total_count=True)
+ ).total_count == 2
+ await store.set_spent(record_1.name(), 10)
+ assert (
+ await store.get_coin_records(spent_range=UInt32Range(start=uint32(10)), include_total_count=True)
+ ).total_count == 3
+ # Make sure the total count gets updated for the same query when deleting a coin record
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 4
+ await store.delete_coin_record(record_4.name())
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 3
+ # Make sure the total count gets updated for the same query when rolling back
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 3
+ await store.rollback_to_block(0)
+ assert (await store.get_coin_records(include_total_count=True)).total_count == 0
+
+
+@pytest.mark.asyncio
+async def test_get_coin_records_total_count_cache_reset() -> None:
+ async with DBConnection(1) as db_wrapper:
+ store = await WalletCoinStore.create(db_wrapper)
+
+ for record in [record_1, record_2, record_3, record_8, record_9]:
+ await store.add_coin_record(record)
+
+ def assert_result(result: GetCoinRecordsResult, *, expected_total_count: int, expected_cache_size: int) -> None:
+ assert result.total_count == expected_total_count
+ assert len(store.total_count_cache.cache) == expected_cache_size
+
+ async def test_cache() -> None:
+ # Try each request a few times and make sure the cached count stays the same each time but increases
+ # with every new request.
+ for _ in range(5):
+ result = await store.get_coin_records(
+ coin_id_filter=HashFilter.include([record_1.name()]), include_total_count=True
+ )
+ assert_result(result, expected_total_count=1, expected_cache_size=1)
+ for _ in range(5):
+ result = await store.get_coin_records(coin_type=CoinType.CLAWBACK, include_total_count=True)
+ assert_result(result, expected_total_count=2, expected_cache_size=2)
+ for _ in range(5):
+ result = await store.get_coin_records(
+ coin_id_filter=HashFilter.include([record_2.name()]), include_total_count=True
+ )
+ assert_result(result, expected_total_count=1, expected_cache_size=3)
+ for _ in range(5):
+ result = await store.get_coin_records(
+ coin_id_filter=HashFilter.include([record_1.name(), record_2.name()]), include_total_count=True
+ )
+ assert_result(result, expected_total_count=2, expected_cache_size=4)
+
+ # All the actions in here should reset the cache and lead to the same results again in `test_cache`.
+ for trigger in [
+ store.add_coin_record(record_4),
+ store.set_spent(coin_4.name(), 10),
+ store.delete_coin_record(record_4.name()),
+ store.rollback_to_block(1000),
+ store.delete_wallet(uint32(record_1.wallet_id)),
+ ]:
+ await test_cache()
+ await trigger
def record(c: Coin, *, confirmed: int, spent: int) -> WalletCoinRecord:
@@ -320,9 +876,9 @@ async def test_rollback_to_block() -> None:
await store.add_coin_record(r4)
await store.add_coin_record(r5)
- assert set(
- (
- await store.get_coin_records(
+ assert (
+ await store.get_coin_records(
+ coin_id_filter=HashFilter.include(
[
coin_1.name(),
coin_2.name(),
@@ -331,16 +887,14 @@ async def test_rollback_to_block() -> None:
coin_5.name(),
]
)
- ).values()
- ) == set(
- [
- r1,
- r2,
- r3,
- r4,
- r5,
- ]
- )
+ )
+ ).records == [
+ r1,
+ r2,
+ r3,
+ r4,
+ r5,
+ ]
assert await store.get_coin_record(coin_5.name()) == r5
@@ -378,17 +932,64 @@ async def test_count_small_unspent() -> None:
await store.add_coin_record(r1)
await store.add_coin_record(r2)
await store.add_coin_record(r3)
+ await store.add_coin_record(record_8)
assert await store.count_small_unspent(5) == 3
assert await store.count_small_unspent(4) == 2
assert await store.count_small_unspent(3) == 2
assert await store.count_small_unspent(2) == 1
assert await store.count_small_unspent(1) == 0
+ assert await store.count_small_unspent(3, coin_type=CoinType.CLAWBACK) == 1
await store.set_spent(coin_2.name(), uint32(12))
+ await store.set_spent(coin_8.name(), uint32(12))
assert await store.count_small_unspent(5) == 2
assert await store.count_small_unspent(4) == 1
assert await store.count_small_unspent(3) == 1
assert await store.count_small_unspent(2) == 1
+ assert await store.count_small_unspent(3, coin_type=CoinType.CLAWBACK) == 0
assert await store.count_small_unspent(1) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_coin_records_between() -> None:
+ async with DBConnection(1) as db_wrapper:
+ store = await WalletCoinStore.create(db_wrapper)
+
+ assert await store.get_all_unspent_coins() == set()
+
+ await store.add_coin_record(record_1) # not spent
+ await store.add_coin_record(record_2) # not spent
+ await store.add_coin_record(record_5) # not spent
+ await store.add_coin_record(record_8) # not spent (clawback)
+
+ records = await store.get_coin_records_between(1, 0, 0)
+ assert len(records) == 0
+ records = await store.get_coin_records_between(1, 0, 3)
+ assert len(records) == 1
+ assert records[0] == record_5
+ records = await store.get_coin_records_between(1, 0, 4, coin_type=CoinType.CLAWBACK)
+ assert len(records) == 1
+ assert records[0] == record_8
+
+
+@pytest.mark.asyncio
+async def test_delete_wallet() -> None:
+ dummy_records = DummyWalletCoinRecords()
+ for i in range(5):
+ dummy_records.generate(i, i * 5)
+ async with DBConnection(1) as wrapper:
+ store = await WalletCoinStore.create(wrapper)
+ # Add the records per wallet and verify them
+ for wallet_id, records in dummy_records.records_per_wallet.items():
+ for coin_record in records:
+ await store.add_coin_record(coin_record)
+ assert set((await store.get_coin_records(wallet_id=wallet_id)).records) == set(records)
+ # Remove one wallet after the other and verify before and after each
+ for wallet_id, records in dummy_records.records_per_wallet.items():
+ # Assert the existence again here to make sure the previous removals did not affect other wallet_ids
+ assert set((await store.get_coin_records(wallet_id=wallet_id)).records) == set(records)
+ # Remove the wallet_id and make sure it is removed fully
+ await store.delete_wallet(wallet_id)
+ assert (await store.get_coin_records(wallet_id=wallet_id)).records == []
diff --git a/tests/wallet/test_wallet_node.py b/tests/wallet/test_wallet_node.py
--- a/tests/wallet/test_wallet_node.py
+++ b/tests/wallet/test_wallet_node.py
@@ -2,16 +2,20 @@
import sys
from pathlib import Path
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional
import pytest
from blspy import PrivateKey
from chia.simulator.block_tools import test_constants
from chia.simulator.setup_nodes import SimulatorsAndWallets
+from chia.simulator.time_out_assert import time_out_assert
+from chia.types.full_block import FullBlock
+from chia.types.peer_info import PeerInfo
from chia.util.config import load_config
-from chia.util.keychain import Keychain, generate_mnemonic
-from chia.wallet.wallet_node import WalletNode
+from chia.util.ints import uint16, uint32, uint128
+from chia.util.keychain import Keychain, KeyData, generate_mnemonic
+from chia.wallet.wallet_node import Balance, WalletNode
@pytest.mark.asyncio
@@ -143,7 +147,7 @@ def test_log_out(root_path_populated_with_config: Path, get_temp_keyring: Keycha
assert node.logged_in_fingerprint == fingerprint
assert node.get_last_used_fingerprint() == fingerprint
- node.log_out() # type: ignore
+ node.log_out()
assert node.logged_in is False
assert node.logged_in_fingerprint is None
@@ -302,3 +306,81 @@ async def test_unique_puzzle_hash_subscriptions(simulator_and_wallet: Simulators
puzzle_hashes = await node.get_puzzle_hashes_to_subscribe()
assert len(puzzle_hashes) > 1
assert len(set(puzzle_hashes)) == len(puzzle_hashes)
+
+
+@pytest.mark.asyncio
+async def test_get_balance(
+ simulator_and_wallet: SimulatorsAndWallets, self_hostname: str, default_400_blocks: List[FullBlock]
+) -> None:
+ [full_node_api], [(wallet_node, wallet_server)], bt = simulator_and_wallet
+ full_node_server = full_node_api.full_node.server
+
+ def wallet_synced() -> bool:
+ return full_node_server.node_id in wallet_node.synced_peers
+
+ async def restart_with_fingerprint(fingerprint: Optional[int]) -> None:
+ wallet_node._close()
+ await wallet_node._await_closed(shutting_down=False)
+ await wallet_node._start_with_fingerprint(fingerprint=fingerprint)
+
+ wallet_id = uint32(1)
+ initial_fingerprint = wallet_node.logged_in_fingerprint
+
+ # TODO: there is a bug in wallet_short_sync_backtrack which leads to a rollback to 0 (-1, which is another bug) and
+ # with that to a KeyError when applying the race cache if there are fewer than WEIGHT_PROOF_RECENT_BLOCKS
+ # blocks but we still have a peak stored in the DB. So we need to add enough blocks for a weight proof here to
+ # be able to restart the wallet in this test.
+ for block in default_400_blocks:
+ await full_node_api.full_node.add_block(block)
+
+ # Initially there should be no sync and no balance
+ assert not wallet_synced()
+ assert await wallet_node.get_balance(wallet_id) == Balance()
+ # Generate some funds, get the balance and make sure it's as expected
+ await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+ await time_out_assert(30, wallet_synced)
+ generated_funds = await full_node_api.farm_blocks_to_wallet(5, wallet_node.wallet_state_manager.main_wallet)
+ expected_generated_balance = Balance(
+ confirmed_wallet_balance=uint128(generated_funds),
+ unconfirmed_wallet_balance=uint128(generated_funds),
+ spendable_balance=uint128(generated_funds),
+ max_send_amount=uint128(generated_funds),
+ unspent_coin_count=uint32(10),
+ )
+ generated_balance = await wallet_node.get_balance(wallet_id)
+ assert generated_balance == expected_generated_balance
+ # Load another key without funds, make sure the balance is empty.
+ other_key = KeyData.generate()
+ assert wallet_node.local_keychain is not None
+ wallet_node.local_keychain.add_private_key(other_key.mnemonic_str())
+ await restart_with_fingerprint(other_key.fingerprint)
+ assert await wallet_node.get_balance(wallet_id) == Balance()
+ # Load the initial fingerprint again and make sure the balance is still what we generated earlier
+ await restart_with_fingerprint(initial_fingerprint)
+ assert await wallet_node.get_balance(wallet_id) == generated_balance
+ # Connect and sync to the full node, generate more funds and test the balance caching
+ # TODO: there is a bug in untrusted sync if we try to sync to the same peak as stored in the DB after restart,
+ # which leads to a rollback to 0 (-1, which is another bug) and then to a validation error because the
+ # downloaded weight proof will not be added to the blockchain properly because we still have a peak with the
+ # same weight stored in the DB but without chain data. The 1 block generation below can be dropped if we just
+ # also store the chain data or maybe adjust the weight proof consideration logic in new_valid_weight_proof.
+ await full_node_api.farm_blocks_to_puzzlehash(1)
+ assert not wallet_synced()
+ await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+ await time_out_assert(30, wallet_synced)
+ generated_funds += await full_node_api.farm_blocks_to_wallet(5, wallet_node.wallet_state_manager.main_wallet)
+ expected_more_balance = Balance(
+ confirmed_wallet_balance=uint128(generated_funds),
+ unconfirmed_wallet_balance=uint128(generated_funds),
+ spendable_balance=uint128(generated_funds),
+ max_send_amount=uint128(generated_funds),
+ unspent_coin_count=uint32(20),
+ )
+ async with wallet_node.wallet_state_manager.set_sync_mode(uint32(100)):
+ # During sync the balance cache should not be updated, so it should still have the old balance here
+ assert await wallet_node.get_balance(wallet_id) == expected_generated_balance
+ # Now, after the sync context, the cache should be updated to the newly generated balance
+ assert await wallet_node.get_balance(wallet_id) == expected_more_balance
+ # Restart one more time and make sure the balance is still correct after start
+ await restart_with_fingerprint(initial_fingerprint)
+ assert await wallet_node.get_balance(wallet_id) == expected_more_balance
diff --git a/tests/wallet/test_wallet_retry.py b/tests/wallet/test_wallet_retry.py
--- a/tests/wallet/test_wallet_retry.py
+++ b/tests/wallet/test_wallet_retry.py
@@ -36,7 +36,7 @@ def assert_sb_not_in_pool(node: FullNodeAPI, sb: SpendBundle) -> None:
def evict_from_pool(node: FullNodeAPI, sb: SpendBundle) -> None:
- mempool_item = node.full_node.mempool_manager.mempool.get_spend_by_id(sb.name())
+ mempool_item = node.full_node.mempool_manager.mempool.get_item_by_id(sb.name())
assert mempool_item is not None
node.full_node.mempool_manager.mempool.remove_from_pool([mempool_item.name], MempoolRemoveReason.CONFLICT)
node.full_node.mempool_manager.remove_seen(sb.name())
diff --git a/tests/wallet/test_wallet_utils.py b/tests/wallet/test_wallet_utils.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/test_wallet_utils.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from random import shuffle
+from typing import List, Optional, Tuple
+
+from chia_rs import Coin, CoinState
+
+from chia.wallet.util.wallet_sync_utils import sort_coin_states
+
+
+def dummy_coin_state(*, created_height: Optional[int], spent_height: Optional[int]) -> CoinState:
+ return CoinState(Coin(bytes(b"0" * 32), bytes(b"0" * 32), 0), spent_height, created_height)
+
+
+def heights(coin_states: List[CoinState]) -> List[Tuple[Optional[int], Optional[int]]]:
+ return [(coin_state.created_height, coin_state.spent_height) for coin_state in coin_states]
+
+
+def test_sort_coin_states() -> None:
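+ # The expected ordering is ascending by max(created_height, spent_height), with None heights treated as 0.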
+ sorted_coin_states = [
+ dummy_coin_state(created_height=None, spent_height=None),
+ dummy_coin_state(created_height=None, spent_height=None),
+ dummy_coin_state(created_height=1, spent_height=None),
+ dummy_coin_state(created_height=9, spent_height=10),
+ dummy_coin_state(created_height=10, spent_height=None),
+ dummy_coin_state(created_height=10, spent_height=10),
+ dummy_coin_state(created_height=10, spent_height=10),
+ dummy_coin_state(created_height=10, spent_height=11),
+ dummy_coin_state(created_height=11, spent_height=None),
+ dummy_coin_state(created_height=11, spent_height=11),
+ dummy_coin_state(created_height=10, spent_height=12),
+ dummy_coin_state(created_height=11, spent_height=12),
+ dummy_coin_state(created_height=12, spent_height=None),
+ dummy_coin_state(created_height=12, spent_height=12),
+ dummy_coin_state(created_height=1, spent_height=20),
+ dummy_coin_state(created_height=19, spent_height=20),
+ ]
+ unsorted_coin_states = sorted_coin_states.copy()
+ shuffle(unsorted_coin_states)
+ assert heights(unsorted_coin_states) != heights(sorted_coin_states)
+ assert heights(sort_coin_states(unsorted_coin_states)) == heights(sorted_coin_states)
diff --git a/tests/wallet/vc_wallet/__init__.py b/tests/wallet/vc_wallet/__init__.py
new file mode 100644
diff --git a/tests/wallet/vc_wallet/test_vc_lifecycle.py b/tests/wallet/vc_wallet/test_vc_lifecycle.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/vc_wallet/test_vc_lifecycle.py
@@ -0,0 +1,841 @@
+from __future__ import annotations
+
+from typing import List, Optional, Tuple
+
+import pytest
+from blspy import G2Element
+
+from chia.clvm.spend_sim import CostLogger, sim_and_client
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.mempool_inclusion_status import MempoolInclusionStatus
+from chia.types.spend_bundle import SpendBundle
+from chia.util.errors import Err
+from chia.util.hash import std_hash
+from chia.util.ints import uint32, uint64
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.payment import Payment
+from chia.wallet.puzzles.singleton_top_layer_v1_1 import (
+ launch_conditions_and_coinsol,
+ puzzle_for_singleton,
+ solution_for_singleton,
+)
+from chia.wallet.uncurried_puzzle import uncurry_puzzle
+from chia.wallet.vc_wallet.cr_cat_drivers import CRCAT, ProofsChecker
+from chia.wallet.vc_wallet.vc_drivers import (
+ ACS_TRANSFER_PROGRAM,
+ VerifiedCredential,
+ construct_exigent_metadata_layer,
+ create_covenant_layer,
+ create_did_tp,
+ create_std_parent_morpher,
+ create_viral_backdoor,
+ match_covenant_layer,
+ match_did_tp,
+ match_viral_backdoor,
+ solve_covenant_layer,
+ solve_did_tp,
+ solve_viral_backdoor,
+)
+
+ACS: Program = Program.to([3, (1, "entropy"), 1, None])
+ACS_PH: bytes32 = ACS.get_tree_hash()
+MOCK_SINGLETON_MOD: Program = Program.to([2, 5, 11])
+MOCK_SINGLETON_MOD_HASH: bytes32 = MOCK_SINGLETON_MOD.get_tree_hash()
+MOCK_LAUNCHER_ID: bytes32 = bytes32([0] * 32)
+MOCK_LAUNCHER_HASH: bytes32 = bytes32([1] * 32)
+MOCK_SINGLETON: Program = MOCK_SINGLETON_MOD.curry(
+ (MOCK_SINGLETON_MOD_HASH, (MOCK_LAUNCHER_ID, MOCK_LAUNCHER_HASH)),
+ ACS,
+)
+
+
+@pytest.mark.asyncio
+async def test_covenant_layer(cost_logger: CostLogger) -> None:
+ async with sim_and_client() as (sim, client):
+ # Create a puzzle that will not pass the initial covenant check
+ FAKE_ACS: Program = Program.to([3, (1, "fake"), 1, None])
+ # The output puzzle will be the same for both
+ covenant_puzzle: Program = create_covenant_layer(ACS_PH, create_std_parent_morpher(ACS_PH), ACS)
+ assert match_covenant_layer(uncurry_puzzle(covenant_puzzle)) == (ACS_PH, create_std_parent_morpher(ACS_PH), ACS)
+ covenant_puzzle_hash: bytes32 = covenant_puzzle.get_tree_hash()
+
+ # Farm both coins
+ await sim.farm_block(FAKE_ACS.get_tree_hash())
+ await sim.farm_block(ACS_PH)
+
+ # Find and spend both
+ fake_acs_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([FAKE_ACS.get_tree_hash()], include_spent_coins=False)
+ )[0].coin
+ acs_coin: Coin = (await client.get_coin_records_by_puzzle_hashes([ACS_PH], include_spent_coins=False))[0].coin
+ await client.push_tx(
+ cost_logger.add_cost(
+ "2x ACS spends - create one coin",
+ SpendBundle(
+ [
+ CoinSpend(
+ fake_acs_coin,
+ FAKE_ACS,
+ Program.to([[51, covenant_puzzle_hash, fake_acs_coin.amount]]),
+ ),
+ CoinSpend(
+ acs_coin,
+ ACS,
+ Program.to([[51, covenant_puzzle_hash, acs_coin.amount]]),
+ ),
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ await sim.farm_block()
+
+ # Find the covenant coins with equal puzzles
+ fake_acs_cov: Coin = (
+ await client.get_coin_records_by_parent_ids([fake_acs_coin.name()], include_spent_coins=False)
+ )[0].coin
+ acs_cov: Coin = (await client.get_coin_records_by_parent_ids([acs_coin.name()], include_spent_coins=False))[
+ 0
+ ].coin
+
+ # With the honest coin, attempt to spend the non-eve case too soon
+ result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ acs_cov,
+ covenant_puzzle,
+ solve_covenant_layer(
+ LineageProof(
+ parent_name=acs_coin.parent_coin_info,
+ inner_puzzle_hash=ACS_PH,
+ amount=uint64(acs_coin.amount),
+ ),
+ Program.to(None),
+ Program.to([[51, covenant_puzzle_hash, acs_coin.amount]]),
+ ),
+ ),
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_MY_PARENT_ID_FAILED)
+
+        # Try the initial spend, which should fail for the fake origin coin
+ for parent, cov in ((fake_acs_coin, fake_acs_cov), (acs_coin, acs_cov)):
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "Covenant layer eve spend - one create coin",
+ SpendBundle(
+ [
+ CoinSpend(
+ cov,
+ covenant_puzzle,
+ solve_covenant_layer(
+ LineageProof(parent_name=parent.parent_coin_info, amount=uint64(parent.amount)),
+ Program.to(None),
+ Program.to([[51, covenant_puzzle_hash, cov.amount]]),
+ ),
+ ),
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ if parent == fake_acs_coin:
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_MY_PARENT_ID_FAILED)
+ else:
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+
+ await sim.farm_block()
+
+ new_acs_cov: Coin = (await client.get_coin_records_by_parent_ids([acs_cov.name()], include_spent_coins=False))[
+ 0
+ ].coin
+
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "Covenant layer non-eve spend - one create coin",
+ SpendBundle(
+ [
+ CoinSpend(
+ new_acs_cov,
+ covenant_puzzle,
+ solve_covenant_layer(
+ LineageProof(
+ parent_name=acs_cov.parent_coin_info,
+ inner_puzzle_hash=ACS_PH,
+ amount=uint64(acs_cov.amount),
+ ),
+ Program.to(None),
+ Program.to([[51, covenant_puzzle_hash, new_acs_cov.amount]]),
+ ),
+ ),
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+
+
+@pytest.mark.asyncio
+async def test_did_tp(cost_logger: CostLogger) -> None:
+ async with sim_and_client() as (sim, client):
+ # Make a mock exigent metadata layer
+ # Prepends new metadata and new transfer program as REMARK condition to conditions of TP
+ # (mod (METADATA TP solution) (a (q . (c (c (q . 1) (c 2 (c 5 ()))) 11)) (a TP (list METADATA () solution))))
+ # (a (q 4 (c (q . 1) (c 2 (c 5 ()))) 11) (a 5 (c 2 (c () (c 11 ())))))
+ MOCK_OWNERSHIP_LAYER: Program = Program.fromhex(
+ "ff02ffff01ff04ffff04ffff0101ffff04ff02ffff04ff05ff80808080ff0b80ffff02ff05ffff04ff02ffff04ff80ffff04ff0bff808080808080" # noqa: E501
+ )
+ # Create it with mock singleton info
+ transfer_program: Program = create_did_tp(MOCK_SINGLETON_MOD_HASH, MOCK_LAUNCHER_HASH)
+ assert match_did_tp(uncurry_puzzle(transfer_program)) == ()
+ eml_puzzle: Program = MOCK_OWNERSHIP_LAYER.curry((MOCK_LAUNCHER_ID, None), transfer_program)
+
+ await sim.farm_block(eml_puzzle.get_tree_hash())
+ eml_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([eml_puzzle.get_tree_hash()], include_spent_coins=False)
+ )[0].coin
+
+ # Define parameters for next few spend attempts
+ provider_innerpuzhash: bytes32 = ACS_PH
+ my_coin_id: bytes32 = eml_coin.name()
+ new_metadata: Program = Program.to("SUCCESS")
+ new_tp_hash: Program = Program.to("NEW TP").get_tree_hash()
+ bad_data: bytes32 = bytes32([0] * 32)
+
+ # Try to update metadata and tp without any announcement
+ result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ eml_coin,
+ eml_puzzle,
+ Program.to(
+ [
+ solve_did_tp(
+ bad_data,
+ my_coin_id,
+ new_metadata,
+ new_tp_hash,
+ )
+ ]
+ ),
+ )
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+
+ # Create the "DID" now
+ await sim.farm_block(MOCK_SINGLETON.get_tree_hash())
+ did_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([MOCK_SINGLETON.get_tree_hash()], include_spent_coins=False)
+ )[0].coin
+ did_authorization_spend: CoinSpend = CoinSpend(
+ did_coin,
+ MOCK_SINGLETON,
+ Program.to([[[62, std_hash(my_coin_id + new_metadata.get_tree_hash() + new_tp_hash)]]]),
+ )
+
+ # Try to pass the wrong coin id
+ result = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ eml_coin,
+ eml_puzzle,
+ Program.to(
+ [
+ solve_did_tp(
+ provider_innerpuzhash,
+ bad_data,
+ new_metadata,
+ new_tp_hash,
+ )
+ ]
+ ),
+ ),
+ did_authorization_spend,
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_MY_COIN_ID_FAILED)
+
+ # Actually use announcement
+ successful_spend: SpendBundle = cost_logger.add_cost(
+ "Fake Ownership Layer - NFT DID TP",
+ SpendBundle(
+ [
+ CoinSpend(
+ eml_coin,
+ eml_puzzle,
+ Program.to(
+ [
+ solve_did_tp(
+ provider_innerpuzhash,
+ my_coin_id,
+ new_metadata,
+ new_tp_hash,
+ )
+ ]
+ ),
+ ),
+ did_authorization_spend,
+ ],
+ G2Element(),
+ ),
+ )
+ result = await client.push_tx(successful_spend)
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+
+ remark_condition: Program = next(
+ condition
+ for condition in successful_spend.coin_spends[0]
+ .puzzle_reveal.to_program()
+ .run(successful_spend.coin_spends[0].solution.to_program())
+ .as_iter()
+ if condition.first() == Program.to(1)
+ )
+ assert remark_condition == Program.to([1, (MOCK_LAUNCHER_ID, new_metadata), new_tp_hash])
+
+
+@pytest.mark.asyncio
+async def test_viral_backdoor(cost_logger: CostLogger) -> None:
+ async with sim_and_client() as (sim, client):
+ # Setup and farm the puzzle
+ hidden_puzzle: Program = Program.to((1, [[61, 1]])) # assert a coin announcement that the solution tells us
+ hidden_puzzle_hash: bytes32 = hidden_puzzle.get_tree_hash()
+ p2_either_puzzle: Program = create_viral_backdoor(hidden_puzzle_hash, ACS_PH)
+ assert match_viral_backdoor(uncurry_puzzle(p2_either_puzzle)) == (hidden_puzzle_hash, ACS_PH)
+
+ await sim.farm_block(p2_either_puzzle.get_tree_hash())
+ p2_either_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes(
+ [p2_either_puzzle.get_tree_hash()], include_spent_coins=False
+ )
+ )[0].coin
+
+ # Reveal the wrong puzzle
+ result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ p2_either_coin,
+ p2_either_puzzle,
+ solve_viral_backdoor(
+ ACS,
+ Program.to(None),
+ hidden=True,
+ ),
+ )
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.FAILED, Err.GENERATOR_RUNTIME_ERROR)
+
+ # Spend the hidden puzzle (make announcement fail)
+ result = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ p2_either_coin,
+ p2_either_puzzle,
+ solve_viral_backdoor(
+ hidden_puzzle,
+ Program.to(bytes32([0] * 32)),
+ hidden=True,
+ ),
+ )
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+
+ # Spend the inner puzzle
+ brick_hash: bytes32 = bytes32([0] * 32)
+ wrapped_brick_hash: bytes32 = create_viral_backdoor(
+ hidden_puzzle_hash,
+ brick_hash,
+ ).get_tree_hash()
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "Viral backdoor spend - one create coin",
+ SpendBundle(
+ [
+ CoinSpend(
+ p2_either_coin,
+ p2_either_puzzle,
+ solve_viral_backdoor(
+ ACS,
+ Program.to([[51, brick_hash, 0]]),
+ ),
+ )
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+
+ await sim.farm_block()
+
+ assert len(await client.get_coin_records_by_puzzle_hashes([wrapped_brick_hash], include_spent_coins=False)) > 0
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("test_syncing", [True, False])
+async def test_vc_lifecycle(test_syncing: bool, cost_logger: CostLogger) -> None:
+ async with sim_and_client() as (sim, client):
+ RUN_PUZ_PUZ: Program = Program.to([2, 1, None]) # (a 1 ()) takes a puzzle as its solution and runs it with ()
+ RUN_PUZ_PUZ_PH: bytes32 = RUN_PUZ_PUZ.get_tree_hash()
+ await sim.farm_block(RUN_PUZ_PUZ_PH)
+ await sim.farm_block(RUN_PUZ_PUZ_PH)
+ vc_fund_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[0].coin
+ did_fund_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[1].coin
+ other_did_fund_coin: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[2].coin
+
+ # Gotta make some DIDs first
+ launcher_id: bytes32
+ lineage_proof: LineageProof
+ did: Coin
+ other_launcher_id: bytes32
+ other_lineage_proof: LineageProof
+ other_did: Coin
+ for fund_coin in (did_fund_coin, other_did_fund_coin):
+ conditions, launcher_spend = launch_conditions_and_coinsol(
+ fund_coin,
+ ACS,
+ [],
+ uint64(1),
+ )
+ await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ fund_coin,
+ RUN_PUZ_PUZ,
+ Program.to((1, conditions)),
+ ),
+ launcher_spend,
+ ],
+ G2Element(),
+ )
+ )
+ await sim.farm_block()
+ if fund_coin == did_fund_coin:
+ launcher_id = launcher_spend.coin.name()
+ lineage_proof = LineageProof(
+ parent_name=launcher_spend.coin.parent_coin_info,
+ amount=uint64(launcher_spend.coin.amount),
+ )
+ did = (await client.get_coin_records_by_parent_ids([launcher_id], include_spent_coins=False))[0].coin
+ else:
+ other_launcher_id = launcher_spend.coin.name()
+ other_lineage_proof = LineageProof(
+ parent_name=launcher_spend.coin.parent_coin_info,
+ amount=uint64(launcher_spend.coin.amount),
+ )
+ other_did = (
+ await client.get_coin_records_by_parent_ids([other_launcher_id], include_spent_coins=False)
+ )[0].coin
+
+ # Now let's launch the VC
+ vc: VerifiedCredential
+ dpuz, coin_spends, vc = VerifiedCredential.launch(
+ vc_fund_coin,
+ launcher_id,
+ ACS_PH,
+ [bytes32([0] * 32)],
+ )
+ result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx(
+ cost_logger.add_cost(
+ "Launch VC",
+ SpendBundle(
+ [
+ CoinSpend(
+ vc_fund_coin,
+ RUN_PUZ_PUZ,
+ dpuz,
+ ),
+ *coin_spends,
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ await sim.farm_block()
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ if test_syncing:
+ vc = VerifiedCredential.get_next_from_coin_spend(coin_spends[1])
+ assert VerifiedCredential.is_vc(uncurry_puzzle(coin_spends[1].puzzle_reveal.to_program()))[0]
+ assert vc.construct_puzzle().get_tree_hash() == vc.coin.puzzle_hash
+ assert len(await client.get_coin_records_by_puzzle_hashes([vc.coin.puzzle_hash], include_spent_coins=False)) > 0
+
+ # Update the proofs with a proper announcement
+ NEW_PROOFS: Program = Program.to((("test", True), ("test2", True)))
+ MALICIOUS_PROOFS: Program = Program.to(("malicious", True))
+ NEW_PROOF_HASH: bytes32 = NEW_PROOFS.get_tree_hash()
+ expected_announcement, update_spend, vc = vc.do_spend(
+ ACS,
+ Program.to([[51, ACS_PH, vc.coin.amount], vc.magic_condition_for_new_proofs(NEW_PROOF_HASH, ACS_PH)]),
+ new_proof_hash=NEW_PROOF_HASH,
+ )
+ for use_did, correct_did in ((False, None), (True, False), (True, True)):
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "Update VC proofs (eve covenant spend) - DID providing announcement",
+ SpendBundle(
+ [
+ *(
+ [
+ CoinSpend(
+ did if correct_did else other_did,
+ puzzle_for_singleton(
+ launcher_id if correct_did else other_launcher_id,
+ ACS,
+ ),
+ solution_for_singleton(
+ lineage_proof if correct_did else other_lineage_proof,
+ uint64(did.amount) if correct_did else uint64(other_did.amount),
+ Program.to(
+ [
+ [51, ACS_PH, did.amount if correct_did else other_did.amount],
+ [62, expected_announcement],
+ ]
+ ),
+ ),
+ )
+ ]
+ if use_did
+ else []
+ ),
+ update_spend,
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ if use_did:
+ if correct_did:
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ else:
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+ else:
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+ await sim.farm_block()
+ if test_syncing:
+ vc = VerifiedCredential.get_next_from_coin_spend(update_spend)
+ assert VerifiedCredential.is_vc(uncurry_puzzle(update_spend.puzzle_reveal.to_program()))[0]
+
+        # Now let's farm funds for some CR-CATs
+ await sim.farm_block(RUN_PUZ_PUZ_PH)
+ await sim.farm_block(RUN_PUZ_PUZ_PH)
+ cr_fund_coin_1: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[0].coin
+ cr_fund_coin_2: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[1].coin
+ cr_fund_coin_3: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[2].coin
+ cr_fund_coin_4: Coin = (
+ await client.get_coin_records_by_puzzle_hashes([RUN_PUZ_PUZ_PH], include_spent_coins=False)
+ )[3].coin
+
+ # Launch the CR-CATs
+ malicious_cr_1: CRCAT
+ malicious_cr_2: CRCAT
+ for cr_coin_1, cr_coin_2 in ((cr_fund_coin_1, cr_fund_coin_2), (cr_fund_coin_3, cr_fund_coin_4)):
+ if cr_coin_1 == cr_fund_coin_1:
+ proofs = ["malicious"]
+ else:
+ proofs = ["test", "test2"]
+ proofs_checker: ProofsChecker = ProofsChecker(proofs)
+ AUTHORIZED_PROVIDERS: List[bytes32] = [launcher_id]
+ dpuz_1, launch_crcat_spend_1, cr_1 = CRCAT.launch(
+ cr_coin_1,
+ Payment(ACS_PH, uint64(cr_coin_1.amount), []),
+ Program.to(None),
+ Program.to(None),
+ AUTHORIZED_PROVIDERS,
+ proofs_checker.as_program(),
+ )
+ dpuz_2, launch_crcat_spend_2, cr_2 = CRCAT.launch(
+ cr_coin_2,
+ Payment(ACS_PH, uint64(cr_coin_2.amount), []),
+ Program.to(None),
+ Program.to(None),
+ AUTHORIZED_PROVIDERS,
+ proofs_checker.as_program(),
+ )
+ result = await client.push_tx(
+ SpendBundle(
+ [
+ CoinSpend(
+ cr_coin_1,
+ RUN_PUZ_PUZ,
+ dpuz_1,
+ ),
+ CoinSpend(
+ cr_coin_2,
+ RUN_PUZ_PUZ,
+ dpuz_2,
+ ),
+ launch_crcat_spend_1,
+ launch_crcat_spend_2,
+ ],
+ G2Element(),
+ )
+ )
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ await sim.farm_block()
+ if test_syncing:
+ cr_1 = CRCAT.get_next_from_coin_spend(launch_crcat_spend_1)[0]
+ cr_2 = CRCAT.get_next_from_coin_spend(launch_crcat_spend_2)[0]
+ assert len(await client.get_coin_records_by_names([cr_1.coin.name()], include_spent_coins=False)) > 0
+ assert len(await client.get_coin_records_by_names([cr_2.coin.name()], include_spent_coins=False)) > 0
+ if cr_coin_1 == cr_fund_coin_1:
+ malicious_cr_1 = cr_1
+ malicious_cr_2 = cr_2
+
+ for error in (
+ "forget_vc",
+ "make_banned_announcement",
+ "use_malicious_cats",
+ "attempt_honest_cat_piggyback",
+ None,
+ ):
+ # The CR-CAT coin spends
+ expected_announcements, cr_cat_spends, new_crcats = CRCAT.spend_many(
+ [
+ (
+ cr_1 if error != "use_malicious_cats" else malicious_cr_1,
+ ACS,
+ Program.to(
+ [
+ [
+ 51,
+ ACS_PH,
+ cr_1.coin.amount if error != "use_malicious_cats" else malicious_cr_1.coin.amount,
+ ],
+ *([[60, b"\xcd" + bytes(32)]] if error == "make_banned_announcement" else []),
+ ]
+ ),
+ ),
+ (
+ cr_2 if error != "use_malicious_cats" else malicious_cr_2,
+ ACS,
+ Program.to(
+ [
+ [
+ 51,
+ ACS_PH,
+ cr_2.coin.amount if error != "use_malicious_cats" else malicious_cr_2.coin.amount,
+ ]
+ ]
+ ),
+ ),
+ ],
+ NEW_PROOFS if error != "use_malicious_cats" else MALICIOUS_PROOFS,
+ Program.to(None),
+ launcher_id,
+ vc.launcher_id,
+ vc.wrap_inner_with_backdoor().get_tree_hash(),
+ )
+
+ # Try to spend the coin to ourselves
+ _, auth_spend, new_vc = vc.do_spend(
+ ACS,
+ Program.to(
+ [
+ [51, ACS_PH, vc.coin.amount],
+ [
+ 62,
+ cr_1.expected_announcement()
+ if error not in ["use_malicious_cats", "attempt_honest_cat_piggyback"]
+ else malicious_cr_1.expected_announcement(),
+ ],
+ [
+ 62,
+ cr_2.expected_announcement()
+ if error not in ["use_malicious_cats", "attempt_honest_cat_piggyback"]
+ else malicious_cr_2.expected_announcement(),
+ ],
+ *([61, a] for a in expected_announcements),
+ vc.standard_magic_condition(),
+ ]
+ ),
+ )
+
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "CR-CATx2 w/ VC announcement, Standard Proof Checker (2 flags)",
+ SpendBundle(
+ [
+ *cr_cat_spends,
+ *([auth_spend] if error != "forget_vc" else []),
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ if error is None:
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ if test_syncing:
+ assert all(
+ CRCAT.is_cr_cat(uncurry_puzzle(spend.puzzle_reveal.to_program())) for spend in cr_cat_spends
+ )
+ new_crcats = [crcat for spend in cr_cat_spends for crcat in CRCAT.get_next_from_coin_spend(spend)]
+ vc = VerifiedCredential.get_next_from_coin_spend(auth_spend)
+ else:
+ vc = new_vc
+ await sim.farm_block()
+ elif error in ["forget_vc", "use_malicious_cats", "attempt_honest_cat_piggyback"]:
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+ elif error == "make_banned_announcement":
+ assert result == (MempoolInclusionStatus.FAILED, Err.GENERATOR_RUNTIME_ERROR)
+
+ save_point: uint32 = sim.block_height
+ # Yoink the coin away from the inner puzzle
+ for correct_did in (False, True):
+ new_did = (
+ (await client.get_coin_records_by_parent_ids([did.name()], include_spent_coins=False))[0].coin
+ if correct_did
+ else other_did
+ )
+ expected_announcement, yoink_spend = vc.activate_backdoor(ACS_PH)
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "VC yoink by DID provider",
+ SpendBundle(
+ [
+ CoinSpend(
+ new_did,
+ puzzle_for_singleton(
+ launcher_id if correct_did else other_launcher_id,
+ ACS,
+ ),
+ solution_for_singleton(
+ LineageProof(
+ parent_name=did.parent_coin_info,
+ inner_puzzle_hash=ACS_PH,
+ amount=uint64(did.amount),
+ )
+ if correct_did
+ else other_lineage_proof,
+ uint64(new_did.amount),
+ Program.to([[51, ACS_PH, new_did.amount], [62, expected_announcement]]),
+ ),
+ ),
+ yoink_spend,
+ ],
+ G2Element(),
+ ),
+ )
+ )
+ if correct_did:
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ await sim.farm_block()
+ if test_syncing:
+ with pytest.raises(ValueError):
+ VerifiedCredential.get_next_from_coin_spend(yoink_spend)
+ else:
+ assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED)
+
+ # Verify the end state
+ new_singletons_puzzle_reveal: Program = puzzle_for_singleton(
+ vc.launcher_id,
+ construct_exigent_metadata_layer(
+ None,
+ ACS_TRANSFER_PROGRAM,
+ ACS,
+ ),
+ )
+
+ assert (
+ len(
+ await client.get_coin_records_by_puzzle_hashes(
+ [new_singletons_puzzle_reveal.get_tree_hash()], include_spent_coins=False
+ )
+ )
+ > 0
+ )
+ assert (
+ len(
+ await client.get_coin_records_by_names(
+ [crcat.coin.name() for crcat in new_crcats], include_spent_coins=False
+ )
+ )
+ == 2
+ )
+
+ # Rewind to pre-yoink state
+ await sim.rewind(save_point)
+
+ _, clear_spend, _ = vc.do_spend(
+ ACS,
+ Program.to(
+ [
+ [51, ACS_PH, vc.coin.amount],
+ [
+ -10,
+ vc.eml_lineage_proof.to_program(),
+ [
+ Program.to(vc.eml_lineage_proof.parent_proof_hash),
+ vc.launcher_id,
+ ],
+ ACS_TRANSFER_PROGRAM.get_tree_hash(),
+ ],
+ ]
+ ),
+ )
+ result = await client.push_tx(
+ cost_logger.add_cost(
+ "VC clear by user",
+ SpendBundle(
+ [clear_spend],
+ G2Element(),
+ ),
+ )
+ )
+ assert result == (MempoolInclusionStatus.SUCCESS, None)
+ await sim.farm_block()
+ if test_syncing:
+ with pytest.raises(ValueError):
+ VerifiedCredential.get_next_from_coin_spend(clear_spend)
+
+ # Verify the end state
+ cleared_singletons_puzzle_reveal: Program = puzzle_for_singleton(
+ vc.launcher_id,
+ construct_exigent_metadata_layer(
+ None,
+ ACS_TRANSFER_PROGRAM,
+ vc.wrap_inner_with_backdoor(),
+ ),
+ )
+
+ assert (
+ len(
+ await client.get_coin_records_by_puzzle_hashes(
+ [cleared_singletons_puzzle_reveal.get_tree_hash()], include_spent_coins=False
+ )
+ )
+ > 0
+ )
diff --git a/tests/wallet/vc_wallet/test_vc_wallet.py b/tests/wallet/vc_wallet/test_vc_wallet.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/vc_wallet/test_vc_wallet.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+from typing import Any, Optional
+
+import pytest
+from typing_extensions import Literal
+
+from chia.rpc.wallet_rpc_client import WalletRpcClient
+from chia.simulator.full_node_simulator import FullNodeSimulator
+from chia.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.peer_info import PeerInfo
+from chia.util.ints import uint16, uint64
+from chia.wallet.did_wallet.did_wallet import DIDWallet
+from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.vc_wallet.vc_store import VCProofs, VCRecord
+
+
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_vc_lifecycle(self_hostname: str, two_wallet_nodes_services: Any, trusted: Any) -> None:
+ num_blocks = 1
+ full_nodes, wallets, bt = two_wallet_nodes_services
+ full_node_api: FullNodeSimulator = full_nodes[0]._api
+ full_node_server = full_node_api.full_node.server
+ wallet_service_0 = wallets[0]
+ wallet_service_1 = wallets[1]
+ wallet_node_0 = wallet_service_0._node
+ wallet_node_1 = wallet_service_1._node
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet # just to farm to for processing TXs
+
+ client_0 = await WalletRpcClient.create(
+ bt.config["self_hostname"],
+ wallet_service_0.rpc_server.listen_port,
+ wallet_service_0.root_path,
+ wallet_service_0.config,
+ )
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await wallet_node_0.server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+ await wallet_node_1.server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_0)
+ confirmed_balance: int = await wallet_0.get_confirmed_balance()
+ did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
+ wallet_node_0.wallet_state_manager, wallet_0, uint64(1)
+ )
+ confirmed_balance -= 1
+ spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
+
+ spend_bundle = spend_bundle_list[0].spend_bundle
+ assert spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_1)
+ await time_out_assert(15, wallet_0.get_confirmed_balance, confirmed_balance)
+ did_id = bytes32.from_hexstr(did_wallet.get_my_DID())
+ vc_record, txs = await client_0.vc_mint(did_id, target_address=await wallet_0.get_new_puzzlehash(), fee=uint64(200))
+ confirmed_balance -= 1
+ confirmed_balance -= 200
+ spend_bundle = next(tx.spend_bundle for tx in txs if tx.spend_bundle is not None)
+ await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_1)
+ await time_out_assert(15, wallet_0.get_confirmed_balance, confirmed_balance)
+ vc_wallet = await wallet_node_0.wallet_state_manager.get_all_wallet_info_entries(wallet_type=WalletType.VC)
+ assert len(vc_wallet) == 1
+ new_vc_record: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id)
+ assert new_vc_record is not None
+
+ assert did_wallet.did_info.current_inner is not None
+ # Spend VC
+ proofs: VCProofs = VCProofs({"foo": "bar", "baz": "qux", "corge": "grault"})
+ proof_root: bytes32 = proofs.root()
+ txs = await client_0.vc_spend(
+ vc_record.vc.launcher_id,
+ new_proof_hash=proof_root,
+ fee=uint64(100),
+ )
+ confirmed_balance -= 100
+ spend_bundle = next(tx.spend_bundle for tx in txs if tx.spend_bundle is not None)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_1)
+ await time_out_assert(15, wallet_0.get_confirmed_balance, confirmed_balance)
+ vc_record_updated: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id)
+ assert vc_record_updated is not None
+ assert vc_record_updated.vc.proof_hash == proof_root
+
+ # Do a mundane spend
+ txs = await client_0.vc_spend(vc_record.vc.launcher_id)
+ spend_bundle = next(tx.spend_bundle for tx in txs if tx.spend_bundle is not None)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_1)
+ await time_out_assert(15, wallet_0.get_confirmed_balance, confirmed_balance)
+
+ async def check_vc_record_has_parent_id(
+ parent_id: bytes32, client: WalletRpcClient, launcher_id: bytes32
+ ) -> Optional[Literal[True]]:
+ vc_record = await client.vc_get(launcher_id)
+ result: Optional[Literal[True]] = None
+ if vc_record is not None:
+ result = True if vc_record.vc.coin.parent_coin_info == parent_id else None
+ return result
+
+ await time_out_assert_not_none(
+ 10, check_vc_record_has_parent_id, vc_record_updated.vc.coin.name(), client_0, vc_record.vc.launcher_id
+ )
+ vc_record_updated = await client_0.vc_get(vc_record.vc.launcher_id)
+ assert vc_record_updated is not None
+
+ # Add proofs to DB
+ await client_0.vc_add_proofs(proofs.key_value_pairs)
+ assert await client_0.vc_get_proofs_for_root(proof_root) == proofs.key_value_pairs
+ vc_records, fetched_proofs = await client_0.vc_get_list()
+ assert len(vc_records) == 1
+ assert fetched_proofs[proof_root.hex()] == proofs.key_value_pairs
+
+ # Revoke VC
+ txs = await client_0.vc_revoke(vc_record_updated.vc.coin.parent_coin_info, uint64(1))
+ confirmed_balance -= 1
+ spend_bundle = next(tx.spend_bundle for tx in txs if tx.spend_bundle is not None)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+ await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_1)
+ await time_out_assert(15, wallet_0.get_confirmed_balance, confirmed_balance)
+ vc_record_revoked: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id)
+ assert vc_record_revoked is None
+ assert (
+ len(await (await wallet_node_0.wallet_state_manager.get_or_create_vc_wallet()).store.get_unconfirmed_vcs()) == 0
+ )
diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py
--- a/tools/test_full_sync.py
+++ b/tools/test_full_sync.py
@@ -7,11 +7,12 @@
import logging
import os
import shutil
+import sys
import tempfile
import time
from contextlib import contextmanager
from pathlib import Path
-from typing import Callable, Iterator, List, Optional
+from typing import Callable, Iterator, List, Optional, cast
import aiosqlite
import click
@@ -21,6 +22,7 @@
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.full_node.full_node import FullNode
from chia.server.outbound_message import Message, NodeType
+from chia.server.server import ChiaServer
from chia.server.ws_connection import WSChiaConnection
from chia.simulator.block_tools import make_unfinished_block
from chia.types.blockchain_format.sized_bytes import bytes32
@@ -44,6 +46,9 @@ def emit(self, record):
@contextmanager
def enable_profiler(profile: bool, counter: int) -> Iterator[None]:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
if not profile:
yield
return
@@ -107,6 +112,9 @@ async def run_sync_test(
node_profiler: bool,
start_at_checkpoint: Optional[str],
) -> None:
+ if sys.version_info < (3, 8):
+ raise Exception(f"Python 3.8 or higher required, running with: {sys.version}")
+
logger = logging.getLogger()
logger.setLevel(logging.WARNING)
handler = logging.FileHandler("test-full-sync.log")
@@ -144,7 +152,7 @@ async def run_sync_test(
)
try:
- full_node.set_server(FakeServer()) # type: ignore[arg-type]
+ full_node.set_server(cast(ChiaServer, FakeServer()))
await full_node._start()
peak = full_node.blockchain.get_peak()
@@ -153,7 +161,7 @@ async def run_sync_test(
else:
height = 0
- peer: WSChiaConnection = FakePeer() # type: ignore[assignment]
+ peer: WSChiaConnection = cast(WSChiaConnection, FakePeer())
print()
counter = 0
@@ -238,7 +246,7 @@ def main() -> None:
pass
-@main.command("run", short_help="run simulated full sync from an existing blockchain db")
+@main.command("run", help="run simulated full sync from an existing blockchain db")
@click.argument("file", type=click.Path(), required=True)
@click.option("--db-version", type=int, required=False, default=2, help="the DB version to use in simulated node")
@click.option("--profile", is_flag=True, required=False, default=False, help="dump CPU profiles for slow batches")
@@ -302,7 +310,7 @@ def run(
)
-@main.command("analyze", short_help="generate call stacks for all profiles dumped to current directory")
+@main.command("analyze", help="generate call stacks for all profiles dumped to current directory")
def analyze() -> None:
from glob import glob
from shlex import quote
@@ -314,7 +322,7 @@ def analyze() -> None:
check_call(f"gprof2dot -f pstats {quote(input_file)} | dot -T png >{quote(output)}", shell=True)
-@main.command("create-checkpoint", short_help="sync the full node up to specified height and save its state")
+@main.command("create-checkpoint", help="sync the full node up to specified height and save its state")
@click.argument("file", type=click.Path(), required=True)
@click.argument("out-file", type=click.Path(), required=True)
@click.option("--height", type=int, required=True, help="Sync node up to this height")
| CLI: Offer shows XCH instead of TXCH when connected to a testnet
Can confirm I also saw this in Ubuntu 20.04, same build of chia, in CLI. From a different Offer:
```
Summary:
OFFERED:
- None (Wallet ID: 2): 1000.0 (1000000 mojos)
REQUESTED:
- XCH (Wallet ID: 1): 1.0 (1000000000000 mojos)
Included Fees: 0
```
_Originally posted by @Starttoaster in https://github.com/Chia-Network/chia-blockchain/issues/11086#issuecomment-1092221650_
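
The summary above shows "XCH" for the native asset even though the wallet is connected to a testnet, where the ticker should read "TXCH". A minimal sketch of one way a summary printer could derive the ticker from the configured network is shown below; the `network_ticker` helper, the config layout, and the formatting are illustrative assumptions, not the actual chia-blockchain CLI code.

```
# Hypothetical sketch: derive the native-asset ticker from the selected network
# instead of hard-coding "XCH". The config dict loosely mirrors the shape of
# chia's config.yaml (selected_network / network_overrides) but is an assumption.
from typing import Any, Dict


def network_ticker(config: Dict[str, Any]) -> str:
    # Fall back to mainnet defaults when keys are missing
    selected = config.get("selected_network", "mainnet")
    overrides = config.get("network_overrides", {}).get("config", {})
    prefix = overrides.get(selected, {}).get("address_prefix", "xch")
    return "TXCH" if prefix == "txch" else "XCH"


def offer_summary_line(ticker: str, wallet_id: int, amount: float, mojos: int) -> str:
    return f"    - {ticker} (Wallet ID: {wallet_id}): {amount} ({mojos} mojos)"


if __name__ == "__main__":
    testnet_config = {
        "selected_network": "testnet10",
        "network_overrides": {"config": {"testnet10": {"address_prefix": "txch"}}},
    }
    # On a testnet wallet this prints "TXCH ..." instead of the misleading "XCH ..."
    print(offer_summary_line(network_ticker(testnet_config), 1, 1.0, 1_000_000_000_000))
```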
| This issue has not been updated in 14 days and is now flagged as stale. If this issue is still affecting you and in need of further review, please comment on it with an update to keep it from auto closing in 7 days.
This issue was automatically closed because it has been flagged as stale, and subsequently passed 7 days with no further activity from the submitter or watchers. | 2023-05-19T21:17:22Z | [] | [] |
Chia-Network/chia-blockchain | 15,427 | Chia-Network__chia-blockchain-15427 | [
"15414"
] | bcead6a60077e8ceee7c8dccef15dde1252787f4 | diff --git a/chia/data_layer/data_layer_errors.py b/chia/data_layer/data_layer_errors.py
--- a/chia/data_layer/data_layer_errors.py
+++ b/chia/data_layer/data_layer_errors.py
@@ -40,3 +40,7 @@ def __init__(self, key: bytes) -> None:
class OfferIntegrityError(Exception):
pass
+
+
+class LauncherCoinNotFoundError(Exception):
+ pass
diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py
--- a/chia/data_layer/data_layer_wallet.py
+++ b/chia/data_layer/data_layer_wallet.py
@@ -11,7 +11,7 @@
from typing_extensions import final
from chia.consensus.block_record import BlockRecord
-from chia.data_layer.data_layer_errors import OfferIntegrityError
+from chia.data_layer.data_layer_errors import LauncherCoinNotFoundError, OfferIntegrityError
from chia.data_layer.data_layer_util import OfferStore, ProofOfInclusion, ProofOfInclusionLayer, StoreProofs, leaf_hash
from chia.protocols.wallet_protocol import CoinState
from chia.server.ws_connection import WSChiaConnection
@@ -199,7 +199,7 @@ async def get_launcher_coin_state(self, launcher_id: bytes32, peer: WSChiaConnec
)
if len(coin_states) == 0:
- raise ValueError(f"Launcher ID {launcher_id} is not a valid coin")
+ raise LauncherCoinNotFoundError(f"Launcher ID {launcher_id} is not a valid coin")
if coin_states[0].coin.puzzle_hash != SINGLETON_LAUNCHER.get_tree_hash():
raise ValueError(f"Coin with ID {launcher_id} is not a singleton launcher")
if coin_states[0].created_height is None:
diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py
--- a/chia/rpc/wallet_rpc_api.py
+++ b/chia/rpc/wallet_rpc_api.py
@@ -9,6 +9,7 @@
from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey
from chia.consensus.block_rewards import calculate_base_farmer_reward
+from chia.data_layer.data_layer_errors import LauncherCoinNotFoundError
from chia.data_layer.data_layer_wallet import DataLayerWallet
from chia.pools.pool_wallet import PoolWallet
from chia.pools.pool_wallet_info import FARMING_TO_POOL, PoolState, PoolWalletInfo, create_pool_state
@@ -3185,10 +3186,18 @@ async def dl_track_new(self, request) -> Dict:
dl_wallet = await DataLayerWallet.create_new_dl_wallet(
self.service.wallet_state_manager,
)
- await dl_wallet.track_new_launcher_id(
- bytes32.from_hexstr(request["launcher_id"]),
- self.service.get_full_node_peer(),
- )
+ peer_list = self.service.get_full_node_peers_in_order()
+ peer_length = len(peer_list)
+ for i, peer in enumerate(peer_list):
+ try:
+ await dl_wallet.track_new_launcher_id(
+ bytes32.from_hexstr(request["launcher_id"]),
+ peer,
+ )
+ except LauncherCoinNotFoundError as e:
+ if i == peer_length - 1:
+ raise e # raise the error if we've tried all peers
+ continue # try some other peers, maybe someone has it
return {}
async def dl_stop_tracking(self, request) -> Dict:
| diff --git a/tests/wallet/db_wallet/test_dl_wallet.py b/tests/wallet/db_wallet/test_dl_wallet.py
--- a/tests/wallet/db_wallet/test_dl_wallet.py
+++ b/tests/wallet/db_wallet/test_dl_wallet.py
@@ -6,6 +6,7 @@
import pytest
+from chia.data_layer.data_layer_errors import LauncherCoinNotFoundError
from chia.data_layer.data_layer_wallet import DataLayerWallet, Mirror
from chia.simulator.setup_nodes import SimulatorsAndWallets
from chia.simulator.simulator_protocol import FarmNewBlockProtocol
@@ -172,6 +173,12 @@ async def test_tracking_non_owned(
async with wallet_node_1.wallet_state_manager.lock:
dl_wallet_1 = await DataLayerWallet.create_new_dl_wallet(wallet_node_1.wallet_state_manager)
+ peer = wallet_node_1.get_full_node_peer()
+
+ # Test tracking a launcher id that does not exist
+ with pytest.raises(LauncherCoinNotFoundError):
+ await dl_wallet_0.track_new_launcher_id(bytes32([1] * 32), peer)
+
nodes = [Program.to("thing").get_tree_hash(), Program.to([8]).get_tree_hash()]
current_tree = MerkleTree(nodes)
current_root = current_tree.calculate_root()
@@ -187,7 +194,6 @@ async def test_tracking_non_owned(
await time_out_assert(15, is_singleton_confirmed, True, dl_wallet_0, launcher_id)
await asyncio.sleep(0.5)
- peer = wallet_node_1.get_full_node_peer()
await dl_wallet_1.track_new_launcher_id(launcher_id, peer)
await time_out_assert(15, is_singleton_confirmed, True, dl_wallet_1, launcher_id)
await asyncio.sleep(0.5)
diff --git a/tests/wallet/rpc/test_dl_wallet_rpc.py b/tests/wallet/rpc/test_dl_wallet_rpc.py
--- a/tests/wallet/rpc/test_dl_wallet_rpc.py
+++ b/tests/wallet/rpc/test_dl_wallet_rpc.py
@@ -111,6 +111,10 @@ async def is_singleton_confirmed(rpc_client: WalletRpcClient, lid: bytes32) -> b
assert await client.dl_history(launcher_id) == [new_singleton_record, singleton_record]
+ # Test tracking a launcher id that does not exist
+ with pytest.raises(ValueError):
+ await client_2.dl_track_new(bytes32([1] * 32))
+
await client_2.dl_track_new(launcher_id)
async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, generation: int) -> bool:
| [Bug] Datalayer subscribing to new store sometimes fails with Launcher ID is not a valid coin
### What happened?
Occasional failures when subscribing to a datalayer singleton with `Launcher ID <id> is not a valid coin` - for example:
```
2023-05-30T15:25:35.014 wallet chia.rpc.util : WARNING Error while handling message: Traceback (most recent call last):
File "chia\rpc\util.py", line 18, in inner
File "chia\rpc\wallet_rpc_api.py", line 3094, in dl_track_new
File "chia\data_layer\data_layer_wallet.py", line 220, in track_new_launcher_id
File "chia\data_layer\data_layer_wallet.py", line 196, in get_launcher_coin_state
ValueError: Launcher ID fa47700cb693529602c3eab47a5d681ffe0145dabeee6c69cabdd7869537b917 is not a valid coin
```
The code in `dl_track_new` only asks for the coin from a single node. If this node for whatever reason doesn't have this coin, this error is returned. The wallet will likely continue to ask the same peer again on a retry.
It might make sense (certainly in light wallet mode) to ask the full list of peers about this coin, since another node might have it. This would hopefully reduce errors where the one selected node happens, for whatever reason, to not have this coin in its DB.
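
The patch above follows this suggestion in `dl_track_new`: iterate over the wallet's full node peers and only raise `LauncherCoinNotFoundError` once the last peer has also failed. A minimal self-contained sketch of that fallback pattern is below; the peer list and `track` callable are stand-ins rather than the real chia classes, and this sketch returns on the first success, whereas the patch simply continues through the remaining peers.

```
# Sketch of the "ask every peer before giving up" retry pattern; the names
# here (peers, track) are stand-ins for the real wallet/peer objects.
import asyncio
from typing import Awaitable, Callable, List


class LauncherCoinNotFoundError(Exception):
    """Stand-in for the error added in chia/data_layer/data_layer_errors.py."""


async def track_with_fallback(
    launcher_id: bytes,
    peers: List[str],
    track: Callable[[bytes, str], Awaitable[None]],
) -> None:
    for i, peer in enumerate(peers):
        try:
            await track(launcher_id, peer)
            return  # this peer knew the launcher coin
        except LauncherCoinNotFoundError:
            if i == len(peers) - 1:
                raise  # every peer was asked and none had the coin
            continue  # try some other peer, maybe someone has it


async def _demo() -> None:
    async def fake_track(launcher_id: bytes, peer: str) -> None:
        # Only the second peer "knows" this coin in the demo
        if peer != "node-b":
            raise LauncherCoinNotFoundError(f"{launcher_id.hex()} unknown to {peer}")

    await track_with_fallback(bytes(32), ["node-a", "node-b", "node-c"], fake_track)
    print("tracked successfully after falling back to node-b")


if __name__ == "__main__":
    asyncio.run(_demo())
```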
### Version
all
### What platform are you using?
Windows
### What ui mode are you using?
GUI
### Relevant log output
_No response_
| 2023-06-01T15:04:27Z | [] | [] |
|
Chia-Network/chia-blockchain | 16,500 | Chia-Network__chia-blockchain-16500 | [
"16456",
"16469"
] | 52fff1451bb0c4f08e44057469c5218baf09d0b6 | diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py
--- a/chia/cmds/chia.py
+++ b/chia/cmds/chia.py
@@ -9,6 +9,7 @@
from chia.cmds.beta import beta_cmd
from chia.cmds.completion import completion
from chia.cmds.configure import configure_cmd
+from chia.cmds.dao import dao_cmd
from chia.cmds.data import data_cmd
from chia.cmds.db import db_cmd
from chia.cmds.dev import dev_cmd
@@ -128,6 +129,7 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None:
cli.add_command(passphrase_cmd)
cli.add_command(beta_cmd)
cli.add_command(completion)
+cli.add_command(dao_cmd)
cli.add_command(dev_cmd)
diff --git a/chia/cmds/dao.py b/chia/cmds/dao.py
new file mode 100644
--- /dev/null
+++ b/chia/cmds/dao.py
@@ -0,0 +1,1003 @@
+from __future__ import annotations
+
+import asyncio
+from typing import Optional, Sequence
+
+import click
+
+from chia.cmds.cmds_util import tx_config_args
+from chia.cmds.plotnft import validate_fee
+
+
+@click.group("dao", short_help="Create, manage or show state of DAOs", no_args_is_help=True)
+@click.pass_context
+def dao_cmd(ctx: click.Context) -> None:
+ pass
+
+
+# ----------------------------------------------------------------------------------------
+# ADD
+
+
+@dao_cmd.command("add", short_help="Create a wallet for an existing DAO", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-n", "--name", help="Set the DAO wallet name", type=str)
+@click.option(
+ "-t",
+ "--treasury-id",
+ help="The Treasury ID of the DAO you want to track",
+ type=str,
+ required=True,
+)
+@click.option(
+ "-a",
+ "--filter-amount",
+ help="The minimum number of votes a proposal needs before the wallet will recognise it",
+ type=int,
+ default=1,
+ show_default=True,
+)
+def dao_add_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ treasury_id: str,
+ filter_amount: int,
+ name: Optional[str],
+) -> None:
+ from .dao_funcs import add_dao_wallet
+
+ extra_params = {
+ "name": name,
+ "treasury_id": treasury_id,
+ "filter_amount": filter_amount,
+ }
+
+ asyncio.run(add_dao_wallet(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# CREATE
+
+
+@dao_cmd.command("create", short_help="Create a new DAO wallet and treasury", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-n", "--name", help="Set the DAO wallet name", type=str)
+@click.option(
+ "--proposal-timelock",
+ help="The minimum number of blocks before a proposal can close",
+ type=int,
+ default=1000,
+ show_default=True,
+)
+@click.option(
+ "--soft-close",
+ help="The number of blocks a proposal must remain unspent before closing",
+ type=int,
+ default=20,
+ show_default=True,
+)
+@click.option(
+ "--attendance-required",
+ help="The minimum number of votes a proposal must receive to be accepted",
+ type=int,
+ required=True,
+)
+@click.option(
+ "--pass-percentage",
+ help="The percentage of 'yes' votes in basis points a proposal must receive to be accepted. 100% = 10000",
+ type=int,
+ default=5000,
+ show_default=True,
+)
+@click.option(
+ "--self-destruct",
+ help="The number of blocks required before a proposal can be automatically removed",
+ type=int,
+ default=10000,
+ show_default=True,
+)
+@click.option(
+ "--oracle-delay",
+ help="The number of blocks required between oracle spends of the treasury",
+ type=int,
+ default=50,
+ show_default=True,
+)
+@click.option(
+ "--proposal-minimum",
+ help="The minimum amount (in xch) that a proposal must use to be created",
+ type=str,
+ default="0.000000000001",
+ show_default=True,
+)
+@click.option(
+ "--filter-amount",
+ help="The minimum number of votes a proposal needs before the wallet will recognise it",
+ type=int,
+ default=1,
+ show_default=True,
+)
+@click.option(
+ "--cat-amount",
+ help="The number of DAO CATs (in mojos) to create when initializing the DAO",
+ type=int,
+ required=True,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@click.option(
+ "--fee-for-cat",
+ help="Set the fees for the CAT creation transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_create_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ proposal_timelock: int,
+ soft_close: int,
+ attendance_required: int,
+ pass_percentage: int,
+ self_destruct: int,
+ oracle_delay: int,
+ proposal_minimum: str,
+ filter_amount: int,
+ cat_amount: int,
+ name: Optional[str],
+ fee: str,
+ fee_for_cat: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import create_dao_wallet
+
+ print("Creating new DAO")
+
+ extra_params = {
+ "fee": fee,
+ "fee_for_cat": fee_for_cat,
+ "name": name,
+ "proposal_timelock": proposal_timelock,
+ "soft_close_length": soft_close,
+ "attendance_required": attendance_required,
+ "pass_percentage": pass_percentage,
+ "self_destruct_length": self_destruct,
+ "oracle_spend_delay": oracle_delay,
+ "proposal_minimum_amount": proposal_minimum,
+ "filter_amount": filter_amount,
+ "amount_of_cats": cat_amount,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(create_dao_wallet(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# TREASURY INFO
+
+
+@dao_cmd.command("get_id", short_help="Get the Treasury ID of a DAO", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="DAO Wallet ID", type=int, required=True)
+def dao_get_id_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+) -> None:
+ from .dao_funcs import get_treasury_id
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ }
+ asyncio.run(get_treasury_id(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("add_funds", short_help="Send funds to a DAO treasury", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="DAO Wallet ID which will receive the funds", type=int, required=True)
+@click.option(
+ "-w",
+ "--funding-wallet-id",
+ help="ID of the wallet to send funds from",
+ type=int,
+ required=True,
+)
+@click.option(
+ "-a",
+ "--amount",
+ help="The amount of funds to send",
+ type=str,
+ required=True,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_add_funds_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ funding_wallet_id: int,
+ amount: str,
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import add_funds_to_treasury
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "funding_wallet_id": funding_wallet_id,
+ "amount": amount,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(add_funds_to_treasury(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("balance", short_help="Get the asset balances for a DAO treasury", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+def dao_get_balance_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+) -> None:
+ from .dao_funcs import get_treasury_balance
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ }
+ asyncio.run(get_treasury_balance(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("rules", short_help="Get the current rules governing the DAO", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+def dao_rules_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+) -> None:
+ from .dao_funcs import get_rules
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ }
+ asyncio.run(get_rules(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# LIST/SHOW PROPOSALS
+
+
+@dao_cmd.command("list_proposals", short_help="List proposals for the DAO", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-c",
+ "--include-closed",
+ help="Include previously closed proposals",
+ is_flag=True,
+)
+def dao_list_proposals_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ include_closed: Optional[bool],
+) -> None:
+ from .dao_funcs import list_proposals
+
+ if not include_closed:
+ include_closed = False
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "include_closed": include_closed,
+ }
+ asyncio.run(list_proposals(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("show_proposal", short_help="Show the details of a specific proposal", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-p",
+ "--proposal_id",
+ help="The ID of the proposal to fetch",
+ type=str,
+ required=True,
+)
+def dao_show_proposal_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ proposal_id: str,
+) -> None:
+ from .dao_funcs import show_proposal
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "proposal_id": proposal_id,
+ }
+ asyncio.run(show_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# VOTE
+
+
+@dao_cmd.command("vote", short_help="Vote on a DAO proposal", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-p",
+ "--proposal-id",
+ help="The ID of the proposal you are voting on",
+ type=str,
+ required=True,
+)
+@click.option(
+ "-a",
+ "--vote-amount",
+ help="The number of votes you want to cast",
+ type=int,
+ required=True,
+)
+@click.option(
+ "-n",
+ "--vote-no",
+ help="Use this option to vote against a proposal. If not present then the vote is for the proposal",
+ is_flag=True,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_vote_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ proposal_id: str,
+ vote_amount: int,
+ vote_no: Optional[bool],
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import vote_on_proposal
+
+ is_yes_vote = False if vote_no else True
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "proposal_id": proposal_id,
+ "vote_amount": vote_amount,
+ "is_yes_vote": is_yes_vote,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(vote_on_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# CLOSE PROPOSALS
+
+
+@dao_cmd.command("close_proposal", short_help="Close a DAO proposal", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-p",
+ "--proposal-id",
+ help="The ID of the proposal you are voting on",
+ type=str,
+ required=True,
+)
+@click.option(
+ "-d",
+ "--self-destruct",
+ help="If a proposal is broken, use self destruct to force it to close",
+ is_flag=True,
+ default=False,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_close_proposal_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ proposal_id: str,
+ self_destruct: bool,
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import close_proposal
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "proposal_id": proposal_id,
+ "self_destruct": self_destruct,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(close_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# LOCKUP COINS
+
+
+@dao_cmd.command("lockup_coins", short_help="Lock DAO CATs for voting", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-a",
+ "--amount",
+ help="The amount of CATs (not mojos) to lock in voting mode",
+ type=str,
+ required=True,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_lockup_coins_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ amount: str,
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import lockup_coins
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "amount": amount,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(lockup_coins(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("release_coins", short_help="Release closed proposals from DAO CATs", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_release_coins_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import release_coins
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(release_coins(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_cmd.command("exit_lockup", short_help="Release DAO CATs from voting mode", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_exit_lockup_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import exit_lockup
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(exit_lockup(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+# CREATE PROPOSALS
+
+
+@dao_cmd.group("create_proposal", short_help="Create and add a proposal to a DAO", no_args_is_help=True)
+@click.pass_context
+def dao_proposal(ctx: click.Context) -> None:
+ pass
+
+
+@dao_proposal.command("spend", short_help="Create a proposal to spend DAO funds", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-t",
+ "--to-address",
+ help="The address the proposal will send funds to",
+ type=str,
+ required=False,
+ default=None,
+)
+@click.option(
+ "-a",
+ "--amount",
+ help="The amount of funds the proposal will send (in mojos)",
+ type=float,
+ required=False,
+ default=None,
+)
+@click.option(
+ "-v",
+ "--vote-amount",
+ help="The number of votes to add",
+ type=int,
+ required=False,
+ default=None,
+)
+@click.option(
+ "--asset-id",
+ help="The asset id of the funds the proposal will send. Leave blank for xch",
+ type=str,
+ required=False,
+ default=None,
+)
+@click.option(
+ "-j",
+ "--from-json",
+ help="Path to a json file containing a list of additions, for use in proposals with multiple spends",
+ type=str,
+ required=False,
+ default=None,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_create_spend_proposal_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ vote_amount: Optional[int],
+ to_address: Optional[str],
+ amount: Optional[str],
+ asset_id: Optional[str],
+ from_json: Optional[str],
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import create_spend_proposal
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "vote_amount": vote_amount,
+ "to_address": to_address,
+ "amount": amount,
+ "asset_id": asset_id,
+ "from_json": from_json,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(create_spend_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_proposal.command("update", short_help="Create a proposal to change the DAO rules", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-v",
+ "--vote-amount",
+ help="The number of votes to add",
+ type=int,
+ required=False,
+ default=None,
+)
+@click.option(
+ "--proposal-timelock",
+ help="The new minimum number of blocks before a proposal can close",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--soft-close",
+ help="The number of blocks a proposal must remain unspent before closing",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--attendance-required",
+ help="The minimum number of votes a proposal must receive to be accepted",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--pass-percentage",
+ help="The percentage of 'yes' votes in basis points a proposal must receive to be accepted. 100% = 10000",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--self-destruct",
+ help="The number of blocks required before a proposal can be automatically removed",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "--oracle-delay",
+ help="The number of blocks required between oracle spends of the treasury",
+ type=int,
+ default=None,
+ required=False,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_create_update_proposal_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ vote_amount: Optional[int],
+ proposal_timelock: Optional[int],
+ soft_close: Optional[int],
+ attendance_required: Optional[int],
+ pass_percentage: Optional[int],
+ self_destruct: Optional[int],
+ oracle_delay: Optional[int],
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import create_update_proposal
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "vote_amount": vote_amount,
+ "proposal_timelock": proposal_timelock,
+ "soft_close_length": soft_close,
+ "attendance_required": attendance_required,
+ "pass_percentage": pass_percentage,
+ "self_destruct_length": self_destruct,
+ "oracle_spend_delay": oracle_delay,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(create_update_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+@dao_proposal.command("mint", short_help="Create a proposal to mint new DAO CATs", no_args_is_help=True)
+@click.option(
+ "-wp",
+ "--wallet-rpc-port",
+ help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
+ type=int,
+ default=None,
+)
+@click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int)
+@click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True)
+@click.option(
+ "-a",
+ "--amount",
+ help="The amount of new cats the proposal will mint (in mojos)",
+ type=int,
+ required=True,
+)
+@click.option(
+ "-t",
+ "--to-address",
+ help="The address new cats will be minted to",
+ type=str,
+ required=True,
+ default=None,
+)
+@click.option(
+ "-v",
+ "--vote-amount",
+ help="The number of votes to add",
+ type=int,
+ required=False,
+ default=None,
+)
+@click.option(
+ "-m",
+ "--fee",
+ help="Set the fees per transaction, in XCH.",
+ type=str,
+ default="0",
+ show_default=True,
+ callback=validate_fee,
+)
+@tx_config_args
+def dao_create_mint_proposal_cmd(
+ wallet_rpc_port: Optional[int],
+ fingerprint: int,
+ wallet_id: int,
+ amount: int,
+    to_address: str,
+ vote_amount: Optional[int],
+ fee: str,
+ min_coin_amount: Optional[str],
+ max_coin_amount: Optional[str],
+ coins_to_exclude: Sequence[str],
+ amounts_to_exclude: Sequence[str],
+ reuse: Optional[bool],
+) -> None:
+ from .dao_funcs import create_mint_proposal
+
+ extra_params = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "amount": amount,
+ "cat_target_address": to_address,
+ "vote_amount": vote_amount,
+ "min_coin_amount": min_coin_amount,
+ "max_coin_amount": max_coin_amount,
+ "coins_to_exclude": coins_to_exclude,
+ "amounts_to_exclude": amounts_to_exclude,
+ "reuse_puzhash": reuse,
+ }
+ asyncio.run(create_mint_proposal(extra_params, wallet_rpc_port, fingerprint))
+
+
+# ----------------------------------------------------------------------------------------
+
+dao_cmd.add_command(dao_add_cmd)
+dao_cmd.add_command(dao_create_cmd)
+dao_cmd.add_command(dao_add_funds_cmd)
+dao_cmd.add_command(dao_get_balance_cmd)
+dao_cmd.add_command(dao_list_proposals_cmd)
+dao_cmd.add_command(dao_show_proposal_cmd)
+dao_cmd.add_command(dao_vote_cmd)
+dao_cmd.add_command(dao_close_proposal_cmd)
+dao_cmd.add_command(dao_lockup_coins_cmd)
+dao_cmd.add_command(dao_exit_lockup_cmd)
+dao_cmd.add_command(dao_release_coins_cmd)
+dao_cmd.add_command(dao_proposal)
diff --git a/chia/cmds/dao_funcs.py b/chia/cmds/dao_funcs.py
new file mode 100644
--- /dev/null
+++ b/chia/cmds/dao_funcs.py
@@ -0,0 +1,580 @@
+from __future__ import annotations
+
+import asyncio
+import json
+import time
+from decimal import Decimal
+from typing import Any, Dict, Optional
+
+from chia.cmds.cmds_util import CMDTXConfigLoader, get_wallet_client, transaction_status_msg, transaction_submitted_msg
+from chia.cmds.units import units
+from chia.cmds.wallet_funcs import get_mojo_per_unit, get_wallet_type
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
+from chia.util.config import selected_network_address_prefix
+from chia.util.ints import uint64
+from chia.wallet.util.tx_config import DEFAULT_COIN_SELECTION_CONFIG
+from chia.wallet.util.wallet_types import WalletType
+
+
+async def add_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
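+    """Create a DAO wallet that follows an existing DAO, identified by its treasury ID."""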
+ treasury_id = args["treasury_id"]
+ filter_amount = args["filter_amount"]
+ name = args["name"]
+
+ print(f"Adding wallet for DAO: {treasury_id}")
+ print("This may take awhile.")
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.create_new_dao_wallet(
+ mode="existing",
+ tx_config=CMDTXConfigLoader.from_json_dict({"reuse_puzhash": True}).to_tx_config(
+ units["chia"], config, fingerprint
+ ),
+ dao_rules=None,
+ amount_of_cats=None,
+ treasury_id=treasury_id,
+ filter_amount=filter_amount,
+ name=name,
+ )
+
+ print("Successfully created DAO Wallet")
+ print("DAO Treasury ID: {treasury_id}".format(**res))
+ print("DAO Wallet ID: {wallet_id}".format(**res))
+ print("CAT Wallet ID: {cat_wallet_id}".format(**res))
+ print("DAOCAT Wallet ID: {dao_cat_wallet_id}".format(**res))
+
+
+async def create_dao_wallet(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
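+    """Launch a new DAO: creates the treasury plus its CAT and DAO CAT wallets using the supplied rules."""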
+ proposal_minimum = uint64(int(Decimal(args["proposal_minimum_amount"]) * units["chia"]))
+
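+    # Keep the proposal minimum odd; an even value is bumped by one mojo below.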
+ if proposal_minimum % 2 == 0:
+ proposal_minimum = uint64(1 + proposal_minimum)
+ print("Adding 1 mojo to proposal minimum amount")
+
+ dao_rules = {
+ "proposal_timelock": args["proposal_timelock"],
+ "soft_close_length": args["soft_close_length"],
+ "attendance_required": args["attendance_required"],
+ "pass_percentage": args["pass_percentage"],
+ "self_destruct_length": args["self_destruct_length"],
+ "oracle_spend_delay": args["oracle_spend_delay"],
+ "proposal_minimum_amount": proposal_minimum,
+ }
+ amount_of_cats = args["amount_of_cats"]
+ filter_amount = args["filter_amount"]
+ name = args["name"]
+
+ fee = Decimal(args["fee"])
+ final_fee: uint64 = uint64(int(fee * units["chia"]))
+
+ fee_for_cat = Decimal(args["fee_for_cat"])
+ final_fee_for_cat: uint64 = uint64(int(fee_for_cat * units["chia"]))
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ conf_coins, _, _ = await wallet_client.get_spendable_coins(
+ wallet_id=1, coin_selection_config=DEFAULT_COIN_SELECTION_CONFIG
+ )
+ if len(conf_coins) < 2: # pragma: no cover
+ raise ValueError("DAO creation requires at least 2 xch coins in your wallet.")
+ res = await wallet_client.create_new_dao_wallet(
+ mode="new",
+ dao_rules=dao_rules,
+ amount_of_cats=amount_of_cats,
+ treasury_id=None,
+ filter_amount=filter_amount,
+ name=name,
+ fee=final_fee,
+ fee_for_cat=final_fee_for_cat,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+
+ print("Successfully created DAO Wallet")
+ print("DAO Treasury ID: {treasury_id}".format(**res))
+ print("DAO Wallet ID: {wallet_id}".format(**res))
+ print("CAT Wallet ID: {cat_wallet_id}".format(**res))
+ print("DAOCAT Wallet ID: {dao_cat_wallet_id}".format(**res))
+
+
+async def get_treasury_id(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _):
+ res = await wallet_client.dao_get_treasury_id(wallet_id=wallet_id)
+ treasury_id = res["treasury_id"]
+ print(f"Treasury ID: {treasury_id}")
+
+
+async def get_rules(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _):
+ res = await wallet_client.dao_get_rules(wallet_id=wallet_id)
+ rules = res["rules"]
+ for rule, val in rules.items():
+ print(f"{rule}: {val}")
+
+
+async def add_funds_to_treasury(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ funding_wallet_id = args["funding_wallet_id"]
+ amount = Decimal(args["amount"])
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ try:
+ typ = await get_wallet_type(wallet_id=funding_wallet_id, wallet_client=wallet_client)
+ mojo_per_unit = get_mojo_per_unit(typ)
+ except LookupError: # pragma: no cover
+ print(f"Wallet id: {wallet_id} not found.")
+ return
+
+ fee = Decimal(args["fee"])
+ final_fee: uint64 = uint64(int(fee * units["chia"]))
+ final_amount: uint64 = uint64(int(amount * mojo_per_unit))
+
+ res = await wallet_client.dao_add_funds_to_treasury(
+ wallet_id=wallet_id,
+ funding_wallet_id=funding_wallet_id,
+ amount=final_amount,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+
+ tx_id = res["tx_id"]
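+        # Poll for up to 10 seconds to confirm the transaction has been sent to full nodes.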
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def get_treasury_balance(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _):
+ res = await wallet_client.dao_get_treasury_balance(wallet_id=wallet_id)
+ balances = res["balances"]
+
+ if not balances:
+ print("The DAO treasury currently has no funds")
+ return None
+
+ xch_mojos = get_mojo_per_unit(WalletType.STANDARD_WALLET)
+ cat_mojos = get_mojo_per_unit(WalletType.CAT)
+ for asset_id, balance in balances.items():
+ if asset_id == "xch":
+ print(f"XCH: {balance / xch_mojos}")
+ else:
+ print(f"{asset_id}: {balance / cat_mojos}")
+
+
+async def list_proposals(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ include_closed = args["include_closed"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _):
+ res = await wallet_client.dao_get_proposals(wallet_id=wallet_id, include_closed=include_closed)
+ proposals = res["proposals"]
+ soft_close_length = res["soft_close_length"]
+ print("############################")
+ for prop in proposals:
+ print("Proposal ID: {proposal_id}".format(**prop))
+ prop_status = "CLOSED" if prop["closed"] else "OPEN"
+ print(f"Status: {prop_status}")
+ print("Votes for: {yes_votes}".format(**prop))
+ votes_against = prop["amount_voted"] - prop["yes_votes"]
+ print(f"Votes against: {votes_against}")
+ print("------------------------")
+ print(f"Proposals have {soft_close_length} blocks of soft close time.")
+ print("############################")
+
+
+async def show_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ proposal_id = args["proposal_id"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config):
+ res = await wallet_client.dao_parse_proposal(wallet_id, proposal_id)
+ pd = res["proposal_dictionary"]
+ blocks_needed = pd["state"]["blocks_needed"]
+ passed = pd["state"]["passed"]
+ closable = pd["state"]["closable"]
+ status = "CLOSED" if pd["state"]["closed"] else "OPEN"
+ votes_needed = pd["state"]["total_votes_needed"]
+ yes_needed = pd["state"]["yes_votes_needed"]
+
+ ptype_val = pd["proposal_type"]
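+        # The parsed proposal reports its type as "s" (spend; a spend carrying "mint_amount" is a mint) or "u" (update).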
+ if (ptype_val == "s") and ("mint_amount" in pd):
+ ptype = "mint"
+ elif ptype_val == "s":
+ ptype = "spend"
+ elif ptype_val == "u":
+ ptype = "update"
+
+ print("")
+ print(f"Details of Proposal: {proposal_id}")
+ print("---------------------------")
+ print("")
+ print(f"Type: {ptype.upper()}")
+ print(f"Status: {status}")
+ print(f"Passed: {passed}")
+ if not passed:
+ print(f"Yes votes needed: {yes_needed}")
+
+ if not pd["state"]["closed"]:
+ print(f"Closable: {closable}")
+ if not closable:
+ print(f"Total votes needed: {votes_needed}")
+ print(f"Blocks remaining: {blocks_needed}")
+
+ prefix = selected_network_address_prefix(config)
+ if ptype == "spend":
+ xch_conds = pd["xch_conditions"]
+ asset_conds = pd["asset_conditions"]
+ print("")
+ if xch_conds:
+ print("Proposal XCH Conditions")
+ for pmt in xch_conds:
+ puzzle_hash = encode_puzzle_hash(bytes32.from_hexstr(pmt["puzzle_hash"]), prefix)
+ amount = pmt["amount"]
+ print(f"Address: {puzzle_hash}\nAmount: {amount}\n")
+ if asset_conds:
+ print("Proposal asset Conditions")
+ for cond in asset_conds:
+ asset_id = cond["asset_id"]
+ print(f"Asset ID: {asset_id}")
+ conds = cond["conditions"]
+ for pmt in conds:
+ puzzle_hash = encode_puzzle_hash(bytes32.from_hexstr(pmt["puzzle_hash"]), prefix)
+ amount = pmt["amount"]
+ print(f"Address: {puzzle_hash}\nAmount: {amount}\n")
+
+ elif ptype == "update":
+ print("")
+ print("Proposed Rules:")
+ for key, val in pd["dao_rules"].items():
+ print(f"{key}: {val}")
+
+ elif ptype == "mint":
+ mint_amount = pd["mint_amount"]
+ address = encode_puzzle_hash(bytes32.from_hexstr(pd["new_cat_puzhash"]), prefix)
+ print("")
+ print(f"Amount of CAT to mint: {mint_amount}")
+ print(f"Address: {address}")
+
+
+async def vote_on_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ vote_amount = args["vote_amount"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ proposal_id = args["proposal_id"]
+ is_yes_vote = args["is_yes_vote"]
+
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_vote_on_proposal(
+ wallet_id=wallet_id,
+ proposal_id=proposal_id,
+ vote_amount=vote_amount,
+ is_yes_vote=is_yes_vote,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ tx_id = res["tx_id"]
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def close_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ proposal_id = args["proposal_id"]
+ self_destruct = args["self_destruct"]
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_close_proposal(
+ wallet_id=wallet_id,
+ proposal_id=proposal_id,
+ fee=final_fee,
+ self_destruct=self_destruct,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ tx_id = res["tx_id"]
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def lockup_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ amount = args["amount"]
+ final_amount: uint64 = uint64(int(Decimal(amount) * units["cat"]))
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_send_to_lockup(
+ wallet_id=wallet_id,
+ amount=final_amount,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ tx_id = res["tx_id"]
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def release_coins(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_free_coins_from_finished_proposals(
+ wallet_id=wallet_id,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ tx_id = res["tx_id"]
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def exit_lockup(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_exit_lockup(
+ wallet_id=wallet_id,
+ coins=[],
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ tx_id = res["tx_id"]
+ start = time.time()
+ while time.time() - start < 10:
+ await asyncio.sleep(0.1)
+ tx = await wallet_client.get_transaction(wallet_id, bytes32.from_hexstr(tx_id))
+ if len(tx.sent_to) > 0:
+ print(transaction_submitted_msg(tx))
+ print(transaction_status_msg(fingerprint, tx_id[2:]))
+ return None
+ print(f"Transaction not yet submitted to nodes. TX ID: {tx_id}") # pragma: no cover
+
+
+async def create_spend_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ asset_id = args.get("asset_id")
+ address = args.get("to_address")
+ amount = args.get("amount")
+ additions_file = args.get("from_json")
+ if additions_file is None and (address is None or amount is None):
+ raise ValueError("Must include a json specification or an address / amount pair.")
+ if additions_file: # pragma: no cover
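+        # Each addition in the json file carries a bech32 "address", converted here to the raw puzzle hash the RPC expects.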
+ with open(additions_file, "r") as f:
+ additions_dict = json.load(f)
+ additions = []
+ for addition in additions_dict:
+ addition["puzzle_hash"] = decode_puzzle_hash(addition["address"]).hex()
+ del addition["address"]
+ additions.append(addition)
+ else:
+ additions = None
+ vote_amount = args.get("vote_amount")
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client)
+ mojo_per_unit = get_mojo_per_unit(wallet_type=wallet_type)
+ final_amount: Optional[uint64] = uint64(int(Decimal(amount) * mojo_per_unit)) if amount else None
+ res = await wallet_client.dao_create_proposal(
+ wallet_id=wallet_id,
+ proposal_type="spend",
+ additions=additions,
+ amount=final_amount,
+ inner_address=address,
+ asset_id=asset_id,
+ vote_amount=vote_amount,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ if res["success"]:
+ asset_id_name = asset_id if asset_id else "XCH"
+ print(f"Created spend proposal for asset: {asset_id_name}")
+ print("Successfully created proposal.")
+ print("Proposal ID: {}".format(res["proposal_id"]))
+ else: # pragma: no cover
+ print("Failed to create proposal.")
+
+
+async def create_update_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = Decimal(args["fee"])
+ final_fee: uint64 = uint64(int(fee * units["chia"]))
+ proposal_timelock = args.get("proposal_timelock")
+ soft_close_length = args.get("soft_close_length")
+ attendance_required = args.get("attendance_required")
+ pass_percentage = args.get("pass_percentage")
+ self_destruct_length = args.get("self_destruct_length")
+ oracle_spend_delay = args.get("oracle_spend_delay")
+ vote_amount = args.get("vote_amount")
+ new_dao_rules = {
+ "proposal_timelock": proposal_timelock,
+ "soft_close_length": soft_close_length,
+ "attendance_required": attendance_required,
+ "pass_percentage": pass_percentage,
+ "self_destruct_length": self_destruct_length,
+ "oracle_spend_delay": oracle_spend_delay,
+ }
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_create_proposal(
+ wallet_id=wallet_id,
+ proposal_type="update",
+ new_dao_rules=new_dao_rules,
+ vote_amount=vote_amount,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ if res["success"]:
+ print("Successfully created proposal.")
+ print("Proposal ID: {}".format(res["proposal_id"]))
+ else: # pragma: no cover
+ print("Failed to create proposal.")
+
+
+async def create_mint_proposal(args: Dict[str, Any], wallet_rpc_port: Optional[int], fp: int) -> None:
+ wallet_id = args["wallet_id"]
+ fee = args["fee"]
+ final_fee: uint64 = uint64(int(Decimal(fee) * units["chia"]))
+ cat_target_address = args["cat_target_address"]
+ amount = args["amount"]
+ vote_amount = args.get("vote_amount")
+ async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
+ res = await wallet_client.dao_create_proposal(
+ wallet_id=wallet_id,
+ proposal_type="mint",
+ cat_target_address=cat_target_address,
+ amount=amount,
+ vote_amount=vote_amount,
+ fee=final_fee,
+ tx_config=CMDTXConfigLoader.from_json_dict(
+ {
+ "min_coin_amount": args["min_coin_amount"],
+ "max_coin_amount": args["max_coin_amount"],
+ "coins_to_exclude": args["coins_to_exclude"],
+ "amounts_to_exclude": args["amounts_to_exclude"],
+ "reuse_puzhash": args["reuse_puzhash"],
+ }
+ ).to_tx_config(units["chia"], config, fingerprint),
+ )
+ if res["success"]:
+ print("Successfully created proposal.")
+ print("Proposal ID: {}".format(res["proposal_id"]))
+ else: # pragma: no cover
+ print("Failed to create proposal.")
diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py
--- a/chia/cmds/wallet_funcs.py
+++ b/chia/cmds/wallet_funcs.py
@@ -95,6 +95,7 @@ def get_mojo_per_unit(wallet_type: WalletType) -> int: # pragma: no cover
WalletType.POOLING_WALLET,
WalletType.DATA_LAYER,
WalletType.VC,
+ WalletType.DAO,
}:
mojo_per_unit = units["chia"]
elif wallet_type in {WalletType.CAT, WalletType.CRCAT}:
@@ -877,6 +878,10 @@ async def print_balances(
my_did = get_did_response["did_id"]
if my_did is not None and len(my_did) > 0:
print(f"{indent}{'-DID ID:'.ljust(ljust)} {my_did}")
+ elif typ == WalletType.DAO:
+ get_id_response = await wallet_client.dao_get_treasury_id(wallet_id)
+ treasury_id = get_id_response["treasury_id"][2:]
+ print(f"{indent}{'-Treasury ID:'.ljust(ljust)} {treasury_id}")
elif len(asset_id) > 0:
print(f"{indent}{'-Asset ID:'.ljust(ljust)} {asset_id}")
print(f"{indent}{'-Wallet ID:'.ljust(ljust)} {wallet_id}")
diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py
--- a/chia/rpc/wallet_rpc_api.py
+++ b/chia/rpc/wallet_rpc_api.py
@@ -43,7 +43,17 @@
from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from chia.wallet.cat_wallet.cat_info import CRCATInfo
from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.cat_wallet.dao_cat_info import LockedCoinInfo
+from chia.wallet.cat_wallet.dao_cat_wallet import DAOCATWallet
from chia.wallet.conditions import Condition
+from chia.wallet.dao_wallet.dao_info import DAORules
+from chia.wallet.dao_wallet.dao_utils import (
+ generate_mint_proposal_innerpuz,
+ generate_simple_proposal_innerpuz,
+ generate_update_proposal_innerpuz,
+ get_treasury_rules_from_puzzle,
+)
+from chia.wallet.dao_wallet.dao_wallet import DAOWallet
from chia.wallet.derive_keys import (
MAX_POOL_WALLETS,
master_sk_to_farmer_sk,
@@ -56,9 +66,9 @@
from chia.wallet.did_wallet.did_wallet import DIDWallet
from chia.wallet.did_wallet.did_wallet_puzzles import (
DID_INNERPUZ_MOD,
+ did_program_to_metadata,
match_did_puzzle,
metadata_to_program,
- program_to_metadata,
)
from chia.wallet.nft_wallet import nft_puzzles
from chia.wallet.nft_wallet.nft_info import NFTCoinInfo, NFTInfo
@@ -71,7 +81,11 @@
from chia.wallet.puzzle_drivers import PuzzleInfo, Solver
from chia.wallet.puzzles.clawback.metadata import AutoClaimSettings, ClawbackMetadata
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import puzzle_hash_for_synthetic_public_key
-from chia.wallet.singleton import create_singleton_puzzle, get_inner_puzzle_from_singleton
+from chia.wallet.singleton import (
+ SINGLETON_LAUNCHER_PUZZLE_HASH,
+ create_singleton_puzzle,
+ get_inner_puzzle_from_singleton,
+)
from chia.wallet.trade_record import TradeRecord
from chia.wallet.trading.offer import Offer
from chia.wallet.transaction_record import TransactionRecord
@@ -202,6 +216,21 @@ def get_routes(self) -> Dict[str, Endpoint]:
"/did_message_spend": self.did_message_spend,
"/did_get_info": self.did_get_info,
"/did_find_lost_did": self.did_find_lost_did,
+ # DAO Wallets
+ "/dao_get_proposals": self.dao_get_proposals,
+ "/dao_create_proposal": self.dao_create_proposal,
+ "/dao_parse_proposal": self.dao_parse_proposal,
+ "/dao_vote_on_proposal": self.dao_vote_on_proposal,
+ "/dao_get_treasury_balance": self.dao_get_treasury_balance,
+ "/dao_get_treasury_id": self.dao_get_treasury_id,
+ "/dao_get_rules": self.dao_get_rules,
+ "/dao_close_proposal": self.dao_close_proposal,
+ "/dao_exit_lockup": self.dao_exit_lockup,
+ "/dao_adjust_filter_level": self.dao_adjust_filter_level,
+ "/dao_add_funds_to_treasury": self.dao_add_funds_to_treasury,
+ "/dao_send_to_lockup": self.dao_send_to_lockup,
+ "/dao_get_proposal_state": self.dao_get_proposal_state,
+ "/dao_free_coins_from_finished_proposals": self.dao_free_coins_from_finished_proposals,
# NFT Wallet
"/nft_mint_nft": self.nft_mint_nft,
"/nft_count_nfts": self.nft_count_nfts,
@@ -751,6 +780,44 @@ async def create_new_wallet(
}
else: # undefined did_type
pass
+ elif request["wallet_type"] == "dao_wallet":
+ name = request.get("name", None)
+ mode = request.get("mode", None)
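+            # "new" launches a fresh DAO treasury; "existing" follows an already-deployed DAO by its treasury_id.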
+ if mode == "new":
+ dao_rules_json = request.get("dao_rules", None)
+ if dao_rules_json:
+ dao_rules = DAORules.from_json_dict(dao_rules_json)
+ else:
+ raise ValueError("DAO rules must be specified for wallet creation")
+ async with self.service.wallet_state_manager.lock:
+ dao_wallet = await DAOWallet.create_new_dao_and_wallet(
+ wallet_state_manager,
+ main_wallet,
+ uint64(request.get("amount_of_cats", None)),
+ dao_rules,
+ tx_config,
+ uint64(request.get("filter_amount", 1)),
+ name,
+ uint64(request.get("fee", 0)),
+ uint64(request.get("fee_for_cat", 0)),
+ )
+ elif mode == "existing":
+ # async with self.service.wallet_state_manager.lock:
+ dao_wallet = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_state_manager,
+ main_wallet,
+ bytes32.from_hexstr(request.get("treasury_id", None)),
+ uint64(request.get("filter_amount", 1)),
+ name,
+ )
+ return {
+ "success": True,
+ "type": dao_wallet.type(),
+ "wallet_id": dao_wallet.id(),
+ "treasury_id": dao_wallet.dao_info.treasury_id,
+ "cat_wallet_id": dao_wallet.dao_info.cat_wallet_id,
+ "dao_cat_wallet_id": dao_wallet.dao_info.dao_cat_wallet_id,
+ }
elif request["wallet_type"] == "nft_wallet":
for wallet in self.service.wallet_state_manager.wallets.values():
did_id: Optional[bytes32] = None
@@ -2120,7 +2187,7 @@ async def did_get_info(self, request: Dict[str, Any]) -> EndpointResult:
"public_key": public_key.atom.hex(),
"recovery_list_hash": recovery_list_hash.atom.hex(),
"num_verification": num_verification.as_int(),
- "metadata": program_to_metadata(metadata),
+ "metadata": did_program_to_metadata(metadata),
"launcher_id": singleton_struct.rest().first().atom.hex(),
"full_puzzle": full_puzzle,
"solution": Program.from_bytes(bytes(coin_spend.solution)).as_python(),
@@ -2297,7 +2364,7 @@ async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult:
None,
None,
False,
- json.dumps(did_wallet_puzzles.program_to_metadata(metadata)),
+ json.dumps(did_wallet_puzzles.did_program_to_metadata(metadata)),
)
await did_wallet.save_info(did_info)
await self.service.wallet_state_manager.update_wallet_puzzle_hashes(did_wallet.wallet_info.id)
@@ -2528,6 +2595,347 @@ async def did_transfer_did(
"transaction_id": txs.name,
}
+ ##########################################################################################
+ # DAO Wallet
+ ##########################################################################################
+
+ async def dao_adjust_filter_level(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ await dao_wallet.adjust_filter_level(uint64(request["filter_level"]))
+ return {
+ "success": True,
+ "dao_info": dao_wallet.dao_info,
+ }
+
+ @tx_endpoint
+ async def dao_add_funds_to_treasury(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ funding_wallet_id = uint32(request["funding_wallet_id"])
+ wallet_type = self.service.wallet_state_manager.wallets[funding_wallet_id].type()
+ amount = request.get("amount")
+ assert amount
+ if wallet_type not in [WalletType.STANDARD_WALLET, WalletType.CAT]: # pragma: no cover
+ raise ValueError(f"Cannot fund a treasury with assets from a {wallet_type.name} wallet")
+ funding_tx = await dao_wallet.create_add_funds_to_treasury_spend(
+ uint64(amount),
+ tx_config,
+ fee=uint64(request.get("fee", 0)),
+ funding_wallet_id=funding_wallet_id,
+ extra_conditions=extra_conditions,
+ )
+ if push:
+ await self.service.wallet_state_manager.add_pending_transaction(funding_tx)
+ return {"success": True, "tx_id": funding_tx.name, "tx": funding_tx}
+
+ async def dao_get_treasury_balance(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ asset_list = dao_wallet.dao_info.assets
+ balances = {}
+ for asset_id in asset_list:
+ balance = await dao_wallet.get_balance_by_asset_type(asset_id=asset_id)
+ if asset_id is None:
+ balances["xch"] = balance
+ else:
+ balances[asset_id.hex()] = balance
+ return {"success": True, "balances": balances}
+
+ async def dao_get_treasury_id(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ treasury_id = dao_wallet.dao_info.treasury_id
+ return {"treasury_id": treasury_id}
+
+ async def dao_get_rules(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ rules = dao_wallet.dao_rules
+ return {"rules": rules}
+
+ @tx_endpoint
+ async def dao_send_to_lockup(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ dao_cat_wallet = self.service.wallet_state_manager.get_wallet(
+ id=dao_wallet.dao_info.dao_cat_wallet_id, required_type=DAOCATWallet
+ )
+ amount = uint64(request["amount"])
+ fee = uint64(request.get("fee", 0))
+ txs = await dao_cat_wallet.enter_dao_cat_voting_mode(
+ amount,
+ tx_config,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ if push:
+ for tx in txs:
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+ return {
+ "success": True,
+ "tx_id": txs[0].name,
+ "txs": txs,
+ }
+
+ async def dao_get_proposals(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ include_closed = request.get("include_closed", True)
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ proposal_list = dao_wallet.dao_info.proposals_list
+ if not include_closed:
+ proposal_list = [prop for prop in proposal_list if not prop.closed]
+ dao_rules = get_treasury_rules_from_puzzle(dao_wallet.dao_info.current_treasury_innerpuz)
+ return {
+ "success": True,
+ "proposals": proposal_list,
+ "proposal_timelock": dao_rules.proposal_timelock,
+ "soft_close_length": dao_rules.soft_close_length,
+ }
+
+ async def dao_get_proposal_state(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ state = await dao_wallet.get_proposal_state(bytes32.from_hexstr(request["proposal_id"]))
+ return {"success": True, "state": state}
+
+ @tx_endpoint
+ async def dao_exit_lockup(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ dao_cat_wallet = self.service.wallet_state_manager.get_wallet(
+ id=dao_wallet.dao_info.dao_cat_wallet_id, required_type=DAOCATWallet
+ )
+ assert dao_cat_wallet is not None
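+        # With no explicit coin list, release every locked coin that has no active votes.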
+ if request["coins"]: # pragma: no cover
+ coin_list = [Coin.from_json_dict(coin) for coin in request["coins"]]
+ coins: List[LockedCoinInfo] = []
+ for lci in dao_cat_wallet.dao_cat_info.locked_coins:
+ if lci.coin in coin_list:
+ coins.append(lci)
+ else:
+ coins = []
+ for lci in dao_cat_wallet.dao_cat_info.locked_coins:
+                if not lci.active_votes:
+ coins.append(lci)
+ fee = uint64(request.get("fee", 0))
+ exit_tx = await dao_cat_wallet.exit_vote_state(
+ coins,
+ tx_config,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ if push:
+ await self.service.wallet_state_manager.add_pending_transaction(exit_tx)
+ return {"success": True, "tx_id": exit_tx.name, "tx": exit_tx}
+
+ @tx_endpoint
+ async def dao_create_proposal(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+
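+        # Build the proposal inner puzzle for the requested type: spend, update or mint.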
+ if request["proposal_type"] == "spend":
+ amounts: List[uint64] = []
+ puzzle_hashes: List[bytes32] = []
+ asset_types: List[Optional[bytes32]] = []
+ additions: Optional[List[Dict[str, Any]]] = request.get("additions")
+ if additions is not None:
+ for addition in additions:
+ if "asset_id" in addition:
+ asset_id = bytes32.from_hexstr(addition["asset_id"])
+ else:
+ asset_id = None
+ receiver_ph = bytes32.from_hexstr(addition["puzzle_hash"])
+ amount = uint64(addition["amount"])
+ amounts.append(amount)
+ puzzle_hashes.append(receiver_ph)
+ asset_types.append(asset_id)
+ else: # pragma: no cover
+ amounts.append(uint64(request["amount"]))
+ puzzle_hashes.append(decode_puzzle_hash(request["inner_address"]))
+ if request["asset_id"] is not None:
+ asset_types.append(bytes32.from_hexstr(request["asset_id"]))
+ else:
+ asset_types.append(None)
+ proposed_puzzle = generate_simple_proposal_innerpuz(
+ dao_wallet.dao_info.treasury_id, puzzle_hashes, amounts, asset_types
+ )
+
+ elif request["proposal_type"] == "update":
+ rules = dao_wallet.dao_rules
+ prop = request["new_dao_rules"]
+ new_rules = DAORules(
+ proposal_timelock=prop.get("proposal_timelock") or rules.proposal_timelock,
+ soft_close_length=prop.get("soft_close_length") or rules.soft_close_length,
+ attendance_required=prop.get("attendance_required") or rules.attendance_required,
+ proposal_minimum_amount=prop.get("proposal_minimum_amount") or rules.proposal_minimum_amount,
+ pass_percentage=prop.get("pass_percentage") or rules.pass_percentage,
+ self_destruct_length=prop.get("self_destruct_length") or rules.self_destruct_length,
+ oracle_spend_delay=prop.get("oracle_spend_delay") or rules.oracle_spend_delay,
+ )
+
+ current_innerpuz = dao_wallet.dao_info.current_treasury_innerpuz
+ assert current_innerpuz is not None
+ proposed_puzzle = await generate_update_proposal_innerpuz(current_innerpuz, new_rules)
+ elif request["proposal_type"] == "mint":
+ amount_of_cats = uint64(request["amount"])
+ mint_address = decode_puzzle_hash(request["cat_target_address"])
+ cat_wallet = self.service.wallet_state_manager.get_wallet(
+ id=dao_wallet.dao_info.cat_wallet_id, required_type=CATWallet
+ )
+ proposed_puzzle = await generate_mint_proposal_innerpuz(
+ dao_wallet.dao_info.treasury_id,
+ cat_wallet.cat_info.limitations_program_hash,
+ amount_of_cats,
+ mint_address,
+ )
+ else: # pragma: no cover
+ return {"success": False, "error": "Unknown proposal type."}
+
+ vote_amount = request.get("vote_amount")
+ fee = uint64(request.get("fee", 0))
+ proposal_tx = await dao_wallet.generate_new_proposal(
+ proposed_puzzle,
+ tx_config,
+ vote_amount=vote_amount,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ assert proposal_tx is not None
+ await self.service.wallet_state_manager.add_pending_transaction(proposal_tx)
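+        # The new proposal's ID is the name of the singleton launcher coin spent in this transaction.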
+        assert isinstance(proposal_tx.removals, list)
+ for coin in proposal_tx.removals:
+ if coin.puzzle_hash == SINGLETON_LAUNCHER_PUZZLE_HASH:
+ proposal_id = coin.name()
+ break
+ else: # pragma: no cover
+ raise ValueError("Could not find proposal ID in transaction")
+ return {
+ "success": True,
+ "proposal_id": proposal_id,
+ "tx_id": proposal_tx.name.hex(),
+ "tx": proposal_tx,
+ }
+
+ @tx_endpoint
+ async def dao_vote_on_proposal(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ vote_amount = None
+ if "vote_amount" in request:
+ vote_amount = uint64(request["vote_amount"])
+ fee = uint64(request.get("fee", 0))
+ vote_tx = await dao_wallet.generate_proposal_vote_spend(
+ bytes32.from_hexstr(request["proposal_id"]),
+ vote_amount,
+ request["is_yes_vote"], # bool
+ tx_config,
+ fee,
+ extra_conditions=extra_conditions,
+ )
+ assert vote_tx is not None
+ if push:
+ await self.service.wallet_state_manager.add_pending_transaction(vote_tx)
+ return {"success": True, "tx_id": vote_tx.name, "tx": vote_tx}
+
+ async def dao_parse_proposal(self, request: Dict[str, Any]) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ proposal_id = bytes32.from_hexstr(request["proposal_id"])
+ proposal_dictionary = await dao_wallet.parse_proposal(proposal_id)
+ assert proposal_dictionary is not None
+ return {"success": True, "proposal_dictionary": proposal_dictionary}
+
+ @tx_endpoint
+ async def dao_close_proposal(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ fee = uint64(request.get("fee", 0))
+ if "genesis_id" in request: # pragma: no cover
+ genesis_id = bytes32.from_hexstr(request["genesis_id"])
+ else:
+ genesis_id = None
+ self_destruct = request.get("self_destruct", None)
+ tx = await dao_wallet.create_proposal_close_spend(
+ bytes32.from_hexstr(request["proposal_id"]),
+ tx_config,
+ genesis_id,
+ fee=fee,
+ self_destruct=self_destruct,
+ extra_conditions=extra_conditions,
+ )
+ assert tx is not None
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+ return {"success": True, "tx_id": tx.name, "tx": tx}
+
+ @tx_endpoint
+ async def dao_free_coins_from_finished_proposals(
+ self,
+ request: Dict[str, Any],
+ tx_config: TXConfig = DEFAULT_TX_CONFIG,
+ push: bool = True,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> EndpointResult:
+ wallet_id = uint32(request["wallet_id"])
+ fee = uint64(request.get("fee", 0))
+ dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet)
+ assert dao_wallet is not None
+ tx = await dao_wallet.free_coins_from_finished_proposals(
+ tx_config,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ assert tx is not None
+ await self.service.wallet_state_manager.add_pending_transaction(tx)
+
+ return {"success": True, "tx_id": tx.name, "tx": tx}
+
##########################################################################################
# NFT Wallet
##########################################################################################
diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py
--- a/chia/rpc/wallet_rpc_client.py
+++ b/chia/rpc/wallet_rpc_client.py
@@ -1343,6 +1343,224 @@ async def sign_message_by_id(self, id: str, message: str) -> Tuple[str, str, str
response = await self.fetch("sign_message_by_id", {"id": id, "message": message})
return response["pubkey"], response["signature"], response["signing_mode"]
+ # DAOs
+ async def create_new_dao_wallet(
+ self,
+ mode: str,
+ tx_config: TXConfig,
+ dao_rules: Optional[Dict[str, uint64]] = None,
+ amount_of_cats: Optional[uint64] = None,
+ treasury_id: Optional[bytes32] = None,
+ filter_amount: uint64 = uint64(1),
+ name: Optional[str] = None,
+ fee: uint64 = uint64(0),
+ fee_for_cat: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> Dict:
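+        """Request creation of a DAO wallet: mode "new" launches a DAO, "existing" follows one by treasury_id."""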
+ request: Dict[str, Any] = {
+ "wallet_type": "dao_wallet",
+ "mode": mode,
+ "treasury_id": treasury_id,
+ "dao_rules": dao_rules,
+ "amount_of_cats": amount_of_cats,
+ "filter_amount": filter_amount,
+ "name": name,
+ "fee": fee,
+ "fee_for_cat": fee_for_cat,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("create_new_wallet", request)
+ return response
+
+ async def dao_get_treasury_id(
+ self,
+ wallet_id: int,
+ ) -> Dict:
+ request: Dict[str, Any] = {"wallet_id": wallet_id}
+ response = await self.fetch("dao_get_treasury_id", request)
+ return response
+
+ async def dao_get_rules(
+ self,
+ wallet_id: int,
+ ) -> Dict:
+ request: Dict[str, Any] = {"wallet_id": wallet_id}
+ response = await self.fetch("dao_get_rules", request)
+ return response
+
+ async def dao_create_proposal(
+ self,
+ wallet_id: int,
+ proposal_type: str,
+ tx_config: TXConfig,
+ additions: Optional[List[Dict]] = None,
+ amount: Optional[uint64] = None,
+ inner_address: Optional[str] = None,
+ asset_id: Optional[str] = None,
+ cat_target_address: Optional[str] = None,
+ vote_amount: Optional[int] = None,
+ new_dao_rules: Optional[Dict[str, Optional[uint64]]] = None,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> Dict:
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "proposal_type": proposal_type,
+ "additions": additions,
+ "amount": amount,
+ "inner_address": inner_address,
+ "asset_id": asset_id,
+ "cat_target_address": cat_target_address,
+ "vote_amount": vote_amount,
+ "new_dao_rules": new_dao_rules,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+
+ response = await self.fetch("dao_create_proposal", request)
+ return response
+
+ async def dao_get_proposal_state(self, wallet_id: int, proposal_id: str):
+ request: Dict[str, Any] = {"wallet_id": wallet_id, "proposal_id": proposal_id}
+ response = await self.fetch("dao_get_proposal_state", request)
+ return response
+
+ async def dao_parse_proposal(self, wallet_id: int, proposal_id: str):
+ request: Dict[str, Any] = {"wallet_id": wallet_id, "proposal_id": proposal_id}
+ response = await self.fetch("dao_parse_proposal", request)
+ return response
+
+ async def dao_vote_on_proposal(
+ self,
+ wallet_id: int,
+ proposal_id: str,
+ vote_amount: uint64,
+ tx_config: TXConfig,
+ is_yes_vote: bool = True,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "proposal_id": proposal_id,
+ "vote_amount": vote_amount,
+ "is_yes_vote": is_yes_vote,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_vote_on_proposal", request)
+ return response
+
+ async def dao_get_proposals(self, wallet_id: int, include_closed: bool = True):
+ request: Dict[str, Any] = {"wallet_id": wallet_id, "include_closed": include_closed}
+ response = await self.fetch("dao_get_proposals", request)
+ return response
+
+ async def dao_close_proposal(
+ self,
+ wallet_id: int,
+ proposal_id: str,
+ tx_config: TXConfig,
+        self_destruct: Optional[bool] = None,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "proposal_id": proposal_id,
+ "self_destruct": self_destruct,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_close_proposal", request)
+ return response
+
+ async def dao_free_coins_from_finished_proposals(
+ self,
+ wallet_id: int,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_free_coins_from_finished_proposals", request)
+ return response
+
+ async def dao_get_treasury_balance(self, wallet_id: int):
+ request: Dict[str, Any] = {"wallet_id": wallet_id}
+ response = await self.fetch("dao_get_treasury_balance", request)
+ return response
+
+ async def dao_add_funds_to_treasury(
+ self,
+ wallet_id: int,
+ funding_wallet_id: int,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "funding_wallet_id": funding_wallet_id,
+ "amount": amount,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_add_funds_to_treasury", request)
+ return response
+
+ async def dao_send_to_lockup(
+ self,
+ wallet_id: int,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "amount": amount,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_send_to_lockup", request)
+ return response
+
+ async def dao_exit_lockup(
+ self,
+ wallet_id: int,
+ tx_config: TXConfig,
+ coins: Optional[List[Dict]] = None,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ):
+ request: Dict[str, Any] = {
+ "wallet_id": wallet_id,
+ "coins": coins,
+ "fee": fee,
+ "extra_conditions": list(extra_conditions),
+ **tx_config.to_json_dict(),
+ }
+ response = await self.fetch("dao_exit_lockup", request)
+ return response
+
+ async def dao_adjust_filter_level(self, wallet_id: int, filter_level: int):
+ request: Dict[str, Any] = {"wallet_id": wallet_id, "filter_level": filter_level}
+ response = await self.fetch("dao_adjust_filter_level", request)
+ return response
+
async def vc_mint(
self,
did_id: bytes32,
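Editorial sketch (not part of the patch): one way the new client methods above might be chained to lock voting CATs and then cast a vote. It assumes an already-connected WalletRpcClient instance named client and that DEFAULT_TX_CONFIG can be imported from chia.wallet.util.tx_config; only methods defined in this diff are called.

    from chia.util.ints import uint64
    from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG

    async def vote_yes(client, dao_wallet_id: int, proposal_id: str, amount: int) -> dict:
        # Move `amount` DAO CATs into voting mode, then cast a yes vote on the proposal.
        await client.dao_send_to_lockup(dao_wallet_id, uint64(amount), DEFAULT_TX_CONFIG)
        return await client.dao_vote_on_proposal(
            dao_wallet_id, proposal_id, uint64(amount), DEFAULT_TX_CONFIG, is_yes_vote=True
        )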
diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py
--- a/chia/wallet/cat_wallet/cat_wallet.py
+++ b/chia/wallet/cat_wallet/cat_wallet.py
@@ -813,7 +813,7 @@ async def generate_signed_transaction(
if not ignore_max_send_amount:
max_send = await self.get_max_send_amount()
if payment_sum > max_send:
- raise ValueError(f"Can't send more than {max_send} mojos in a single transaction")
+            raise ValueError(f"Insufficient funds. Your max amount is {max_send} mojos in a single transaction.")
unsigned_spend_bundle, chia_tx = await self.generate_unsigned_spendbundle(
payments,
tx_config,
@@ -870,7 +870,6 @@ async def generate_signed_transaction(
valid_times=parse_timelock_info(extra_conditions),
)
)
-
return tx_list
async def add_lineage(self, name: bytes32, lineage: Optional[LineageProof]) -> None:
diff --git a/chia/wallet/cat_wallet/dao_cat_info.py b/chia/wallet/cat_wallet/dao_cat_info.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/cat_wallet/dao_cat_info.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Optional
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.ints import uint64
+from chia.util.streamable import Streamable, streamable
+
+
+@streamable
+@dataclass(frozen=True)
+class LockedCoinInfo(Streamable):
+ coin: Coin
+ inner_puzzle: Program # This is the lockup puzzle, not the lockup_puzzle's inner_puzzle
+ active_votes: List[Optional[bytes32]]
+
+
+@streamable
+@dataclass(frozen=True)
+class DAOCATInfo(Streamable):
+ dao_wallet_id: uint64
+ free_cat_wallet_id: uint64
+ limitations_program_hash: bytes32
+ my_tail: Optional[Program] # this is the program
+ locked_coins: List[LockedCoinInfo]
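Editorial sketch (not part of the patch): constructing the streamables defined above with placeholder values and serializing them to hex, the same round-trip the DAO CAT wallet performs later in save_info when it stores its state in WalletInfo.data.

    from chia.types.blockchain_format.coin import Coin
    from chia.types.blockchain_format.program import Program
    from chia.types.blockchain_format.sized_bytes import bytes32
    from chia.util.ints import uint64
    from chia.wallet.cat_wallet.dao_cat_info import DAOCATInfo, LockedCoinInfo

    locked = LockedCoinInfo(
        coin=Coin(bytes32(b"\x00" * 32), bytes32(b"\x01" * 32), uint64(1000)),
        inner_puzzle=Program.to(1),            # placeholder for a real lockup puzzle
        active_votes=[bytes32(b"\x02" * 32)],  # proposal ids this coin has already voted on
    )
    info = DAOCATInfo(
        dao_wallet_id=uint64(3),
        free_cat_wallet_id=uint64(2),
        limitations_program_hash=bytes32(b"\x03" * 32),
        my_tail=None,
        locked_coins=[locked],
    )
    data_str = bytes(info).hex()  # serialized form kept in the wallet database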
diff --git a/chia/wallet/cat_wallet/dao_cat_wallet.py b/chia/wallet/cat_wallet/dao_cat_wallet.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/cat_wallet/dao_cat_wallet.py
@@ -0,0 +1,677 @@
+from __future__ import annotations
+
+import logging
+import time
+from secrets import token_bytes
+from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Set, Tuple, cast
+
+from blspy import G1Element
+
+from chia.server.ws_connection import WSChiaConnection
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.spend_bundle import SpendBundle
+from chia.util.byte_types import hexstr_to_bytes
+from chia.util.ints import uint32, uint64, uint128
+from chia.wallet.cat_wallet.cat_utils import (
+ CAT_MOD,
+ SpendableCAT,
+ construct_cat_puzzle,
+ unsigned_spend_bundle_for_spendable_cats,
+)
+from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.cat_wallet.dao_cat_info import DAOCATInfo, LockedCoinInfo
+from chia.wallet.cat_wallet.lineage_store import CATLineageStore
+from chia.wallet.conditions import Condition, parse_timelock_info
+from chia.wallet.dao_wallet.dao_utils import (
+ add_proposal_to_active_list,
+ get_active_votes_from_lockup_puzzle,
+ get_finished_state_inner_puzzle,
+ get_innerpuz_from_lockup_puzzle,
+ get_lockup_puzzle,
+)
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.payment import Payment
+from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.curry_and_treehash import calculate_hash_of_quoted_mod_hash
+from chia.wallet.util.transaction_type import TransactionType
+from chia.wallet.util.tx_config import CoinSelectionConfig, TXConfig
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend
+from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.wallet import Wallet
+from chia.wallet.wallet_coin_record import WalletCoinRecord
+from chia.wallet.wallet_info import WalletInfo
+
+if TYPE_CHECKING:
+ from chia.wallet.wallet_state_manager import WalletStateManager
+
+CAT_MOD_HASH = CAT_MOD.get_tree_hash()
+CAT_MOD_HASH_HASH = Program.to(CAT_MOD_HASH).get_tree_hash()
+QUOTED_MOD_HASH = calculate_hash_of_quoted_mod_hash(CAT_MOD_HASH)
+
+
+class DAOCATWallet:
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol[DAOCATInfo]] = cast("DAOCATWallet", None)
+
+ wallet_state_manager: Any
+ log: logging.Logger
+ wallet_info: WalletInfo
+ dao_cat_info: DAOCATInfo
+ standard_wallet: Wallet
+ cost_of_single_tx: Optional[int]
+ lineage_store: CATLineageStore
+
+ @classmethod
+ def type(cls) -> WalletType:
+ return WalletType.DAO_CAT
+
+ @staticmethod
+ async def create(
+ wallet_state_manager: WalletStateManager,
+ wallet: Wallet,
+ wallet_info: WalletInfo,
+ ) -> DAOCATWallet:
+ self = DAOCATWallet()
+ self.log = logging.getLogger(__name__)
+
+ self.cost_of_single_tx = None
+ self.wallet_state_manager = wallet_state_manager
+ self.wallet_info = wallet_info
+ self.standard_wallet = wallet
+ try:
+ self.dao_cat_info = DAOCATInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
+ self.lineage_store = await CATLineageStore.create(self.wallet_state_manager.db_wrapper, self.get_asset_id())
+ except AssertionError as e: # pragma: no cover
+ self.log.error(f"Error creating DAO CAT wallet: {e}")
+
+ return self
+
+ @staticmethod
+ async def get_or_create_wallet_for_cat(
+ wallet_state_manager: Any,
+ wallet: Wallet,
+ limitations_program_hash_hex: str,
+ name: Optional[str] = None,
+ ) -> DAOCATWallet:
+ self = DAOCATWallet()
+ self.cost_of_single_tx = None
+ self.standard_wallet = wallet
+ self.log = logging.getLogger(__name__)
+
+ limitations_program_hash_hex = bytes32.from_hexstr(limitations_program_hash_hex).hex() # Normalize the format
+
+ dao_wallet_id = None
+ free_cat_wallet_id = None
+ for id, w in wallet_state_manager.wallets.items():
+ if w.type() == DAOCATWallet.type():
+ assert isinstance(w, DAOCATWallet)
+ if w.get_asset_id() == limitations_program_hash_hex:
+ self.log.warning("Not creating wallet for already existing DAO CAT wallet")
+ return w
+ elif w.type() == CATWallet.type():
+ assert isinstance(w, CATWallet)
+ if w.get_asset_id() == limitations_program_hash_hex:
+ free_cat_wallet_id = w.id()
+ assert free_cat_wallet_id is not None
+ for id, w in wallet_state_manager.wallets.items():
+ if w.type() == WalletType.DAO:
+ self.log.info(f"FOUND DAO WALLET: {w}")
+ self.log.info(f"ALL WALLETS: {wallet_state_manager.wallets}")
+ if w.get_cat_wallet_id() == free_cat_wallet_id:
+ dao_wallet_id = w.id()
+ assert dao_wallet_id is not None
+ self.wallet_state_manager = wallet_state_manager
+ if name is None:
+ name = CATWallet.default_wallet_name_for_unknown_cat(limitations_program_hash_hex)
+
+ limitations_program_hash = bytes32(hexstr_to_bytes(limitations_program_hash_hex))
+
+ self.dao_cat_info = DAOCATInfo(
+ dao_wallet_id,
+ uint64(free_cat_wallet_id),
+ limitations_program_hash,
+ None,
+ [],
+ )
+ info_as_string = bytes(self.dao_cat_info).hex()
+ self.wallet_info = await wallet_state_manager.user_store.create_wallet(name, WalletType.DAO_CAT, info_as_string)
+
+ self.lineage_store = await CATLineageStore.create(self.wallet_state_manager.db_wrapper, self.get_asset_id())
+ await self.wallet_state_manager.add_new_wallet(self)
+ return self
+
+ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection, coin_data: Optional[Any]) -> None:
+ """Notification from wallet state manager that wallet has been received."""
+ self.log.info(f"DAO CAT wallet has been notified that {coin} was added")
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ parent_coin = (await wallet_node.get_coin_state([coin.parent_coin_info], peer, height))[0]
+ parent_spend = await fetch_coin_spend(height, parent_coin.coin, peer)
+ uncurried = parent_spend.puzzle_reveal.uncurry()
+ cat_inner = uncurried[1].at("rrf")
+ active_votes_list: List[Optional[bytes32]] = []
+
+ record = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(coin.puzzle_hash)
+ if record:
+ inner_puzzle: Optional[Program] = self.standard_wallet.puzzle_for_pk(record.pubkey)
+ else:
+ inner_puzzle = get_innerpuz_from_lockup_puzzle(cat_inner)
+ assert isinstance(inner_puzzle, Program)
+ active_votes_list = get_active_votes_from_lockup_puzzle(cat_inner)
+ active_votes_list = [x.as_atom() for x in active_votes_list.as_iter()]
+
+ if parent_spend.coin.puzzle_hash == coin.puzzle_hash:
+ # shortcut, works for change
+ lockup_puz = cat_inner
+ else:
+ solution = parent_spend.solution.to_program().first()
+ if solution.first() == Program.to(0):
+ # No vote is being added so inner puz stays the same
+ try:
+ removals = solution.at("rrrf")
+ if removals != Program.to(0):
+ for removal in removals.as_iter():
+ active_votes_list.remove(bytes32(removal.as_atom()))
+ except Exception:
+ pass
+ else:
+ new_vote = solution.at("rrrf")
+ active_votes_list.insert(0, bytes32(new_vote.as_atom()))
+
+ lockup_puz = get_lockup_puzzle(
+ self.dao_cat_info.limitations_program_hash,
+ active_votes_list,
+ inner_puzzle,
+ )
+
+ new_cat_puzhash = construct_cat_puzzle(
+ CAT_MOD, self.dao_cat_info.limitations_program_hash, lockup_puz
+ ).get_tree_hash()
+
+ if new_cat_puzhash != coin.puzzle_hash: # pragma: no cover
+ raise ValueError(f"Cannot add coin - incorrect lockup puzzle: {coin}")
+
+ lineage_proof = LineageProof(coin.parent_coin_info, lockup_puz.get_tree_hash(), uint64(coin.amount))
+ await self.add_lineage(coin.name(), lineage_proof)
+
+ # add the new coin to the list of locked coins and remove the spent coin
+ locked_coins = [x for x in self.dao_cat_info.locked_coins if x.coin != parent_spend.coin]
+ new_info = LockedCoinInfo(coin, lockup_puz, active_votes_list)
+ if new_info not in locked_coins:
+ locked_coins.append(LockedCoinInfo(coin, lockup_puz, active_votes_list))
+ dao_cat_info: DAOCATInfo = DAOCATInfo(
+ self.dao_cat_info.dao_wallet_id,
+ self.dao_cat_info.free_cat_wallet_id,
+ self.dao_cat_info.limitations_program_hash,
+ self.dao_cat_info.my_tail,
+ locked_coins,
+ )
+ await self.save_info(dao_cat_info)
+
+ async def add_lineage(self, name: bytes32, lineage: Optional[LineageProof]) -> None:
+ """
+ Lineage proofs are stored as a list of parent coins and the lineage proof you will need if they are the
+ parent of the coin you are trying to spend. 'If I'm your parent, here's the info you need to spend yourself'
+ """
+ self.log.info(f"Adding parent {name.hex()}: {lineage}")
+ if lineage is not None:
+ await self.lineage_store.add_lineage_proof(name, lineage)
+
+ async def get_lineage_proof_for_coin(self, coin: Coin) -> Optional[LineageProof]:
+ return await self.lineage_store.get_lineage_proof(coin.parent_coin_info)
+
+ async def remove_lineage(self, name: bytes32) -> None: # pragma: no cover
+ self.log.info(f"Removing parent {name} (probably had a non-CAT parent)")
+ await self.lineage_store.remove_lineage_proof(name)
+
+ async def advanced_select_coins(self, amount: uint64, proposal_id: bytes32) -> List[LockedCoinInfo]:
+ coins = []
+ s = 0
+ for coin in self.dao_cat_info.locked_coins:
+ compatible = True
+ for active_vote in coin.active_votes:
+ if active_vote == proposal_id: # pragma: no cover
+ compatible = False
+ break
+ if compatible:
+ coins.append(coin)
+ s += coin.coin.amount
+ if s >= amount:
+ break
+ if s < amount: # pragma: no cover
+ raise ValueError(
+ "We do not have enough CATs in Voting Mode right now. "
+ "Please convert some more or try again with permission to convert."
+ )
+ return coins
+
+ def id(self) -> uint32:
+ return self.wallet_info.id
+
+ async def create_vote_spend(
+ self,
+ amount: uint64,
+ proposal_id: bytes32,
+ is_yes_vote: bool,
+ proposal_puzzle: Optional[Program] = None,
+ ) -> SpendBundle:
+ coins: List[LockedCoinInfo] = await self.advanced_select_coins(amount, proposal_id)
+ running_sum = 0 # this will be used for change calculation
+ change = sum(c.coin.amount for c in coins) - amount
+ extra_delta, limitations_solution = 0, Program.to([])
+ limitations_program_reveal = Program.to([])
+ spendable_cat_list = []
+ dao_wallet = self.wallet_state_manager.wallets[self.dao_cat_info.dao_wallet_id]
+ if proposal_puzzle is None: # pragma: no cover
+ proposal_puzzle = dao_wallet.get_proposal_puzzle(proposal_id)
+ assert proposal_puzzle is not None
+ for lci in coins:
+ coin = lci.coin
+ vote_info = 0
+ new_innerpuzzle = add_proposal_to_active_list(lci.inner_puzzle, proposal_id)
+ assert new_innerpuzzle is not None
+ standard_inner_puz = get_innerpuz_from_lockup_puzzle(new_innerpuzzle)
+ assert isinstance(standard_inner_puz, Program)
+ # add_proposal_to_active_list also verifies that the lci.inner_puzzle is accurate
+ # We must create either: one coin with the new puzzle and all our value
+ # OR
+ # a coin with the new puzzle and part of our amount AND a coin with our current puzzle and the change
+ # We must also create a puzzle announcement which announces the following:
+ # message = (sha256tree (list new_proposal_vote_id_or_removal_id vote_amount vote_info my_id))
+ message = Program.to([proposal_id, amount, is_yes_vote, coin.name()]).get_tree_hash()
+ vote_amounts_list = []
+ voting_coin_id_list = []
+ previous_votes_list = []
+ lockup_innerpuz_list = []
+ if running_sum + coin.amount <= amount:
+ vote_amount = coin.amount
+ running_sum = running_sum + coin.amount
+ primaries = [
+ Payment(
+ new_innerpuzzle.get_tree_hash(),
+ uint64(vote_amount),
+ [standard_inner_puz.get_tree_hash()],
+ )
+ ]
+ message = Program.to([proposal_id, vote_amount, is_yes_vote, coin.name()]).get_tree_hash()
+ puzzle_announcements = set([message])
+ inner_solution = self.standard_wallet.make_solution(
+ primaries=primaries, puzzle_announcements=puzzle_announcements
+ )
+ else:
+ vote_amount = amount - running_sum
+ running_sum = running_sum + coin.amount
+ primaries = [
+ Payment(
+ new_innerpuzzle.get_tree_hash(),
+ uint64(vote_amount),
+ [standard_inner_puz.get_tree_hash()],
+ ),
+ ]
+ if change > 0:
+ primaries.append(
+ Payment(
+ lci.inner_puzzle.get_tree_hash(),
+ uint64(change),
+ [lci.inner_puzzle.get_tree_hash()],
+ )
+ )
+ message = Program.to([proposal_id, vote_amount, is_yes_vote, coin.name()]).get_tree_hash()
+ puzzle_announcements = set([message])
+ inner_solution = self.standard_wallet.make_solution(
+ primaries=primaries, puzzle_announcements=puzzle_announcements
+ )
+ if is_yes_vote:
+ vote_info = 1
+ vote_amounts_list.append(vote_amount)
+ voting_coin_id_list.append(coin.name())
+ previous_votes_list.append(get_active_votes_from_lockup_puzzle(lci.inner_puzzle))
+ lockup_innerpuz_list.append(get_innerpuz_from_lockup_puzzle(lci.inner_puzzle))
+ solution = Program.to(
+ [
+ coin.name(),
+ inner_solution,
+ coin.amount,
+ proposal_id,
+ proposal_puzzle.get_tree_hash(),
+ vote_info,
+ vote_amount,
+ lci.inner_puzzle.get_tree_hash(),
+ 0,
+ ]
+ )
+ lineage_proof = await self.get_lineage_proof_for_coin(coin)
+ assert lineage_proof is not None
+ new_spendable_cat = SpendableCAT(
+ coin,
+ self.dao_cat_info.limitations_program_hash,
+ lci.inner_puzzle,
+ solution,
+ limitations_solution=limitations_solution,
+ extra_delta=extra_delta,
+ lineage_proof=lineage_proof,
+ limitations_program_reveal=limitations_program_reveal,
+ )
+ spendable_cat_list.append(new_spendable_cat)
+
+ cat_spend_bundle = unsigned_spend_bundle_for_spendable_cats(CAT_MOD, spendable_cat_list)
+ spend_bundle = await self.wallet_state_manager.sign_transaction(cat_spend_bundle.coin_spends)
+ assert isinstance(spend_bundle, SpendBundle)
+ return spend_bundle
+
+ async def enter_dao_cat_voting_mode(
+ self,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> List[TransactionRecord]:
+ """
+ Enter existing CATs for the DAO into voting mode
+ """
+ # check there are enough cats to convert
+ cat_wallet = self.wallet_state_manager.wallets[self.dao_cat_info.free_cat_wallet_id]
+ cat_balance = await cat_wallet.get_spendable_balance()
+ if cat_balance < amount: # pragma: no cover
+ raise ValueError(f"Insufficient CAT balance. Requested: {amount} Available: {cat_balance}")
+ # get the lockup puzzle hash
+ lockup_puzzle = await self.get_new_puzzle()
+ # create the cat spend
+ txs: List[TransactionRecord] = await cat_wallet.generate_signed_transaction(
+ [amount],
+ [lockup_puzzle.get_tree_hash()],
+ tx_config,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ cat_puzzle_hash: bytes32 = construct_cat_puzzle(
+ CAT_MOD, self.dao_cat_info.limitations_program_hash, lockup_puzzle
+ ).get_tree_hash()
+ await self.wallet_state_manager.add_interested_puzzle_hashes([cat_puzzle_hash], [self.id()])
+ return txs
+
+ async def exit_vote_state(
+ self,
+ coins: List[LockedCoinInfo],
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ extra_delta, limitations_solution = 0, Program.to([])
+ limitations_program_reveal = Program.to([])
+ spendable_cat_list = []
+ total_amt = 0
+ spent_coins = []
+ for lci in coins:
+ coin = lci.coin
+ if tx_config.reuse_puzhash: # pragma: no cover
+ new_inner_puzhash = await self.standard_wallet.get_puzzle_hash(new=False)
+ else:
+ new_inner_puzhash = await self.standard_wallet.get_puzzle_hash(new=True)
+
+ # CREATE_COIN new_puzzle coin.amount
+ primaries = [
+ Payment(
+ new_inner_puzhash,
+ uint64(coin.amount),
+ [new_inner_puzhash],
+ ),
+ ]
+ total_amt += coin.amount
+ inner_solution = self.standard_wallet.make_solution(
+ primaries=primaries,
+ )
+ # Create the solution using only the values needed for exiting the lockup mode (my_id = 0)
+ solution = Program.to(
+ [
+ 0, # my_id
+ inner_solution,
+ coin.amount,
+ 0, # new_proposal_vote_id_or_removal_id
+ 0, # proposal_innerpuzhash
+ 0, # vote_info
+ 0, # vote_amount
+ 0, # my_inner_puzhash
+ ]
+ )
+ lineage_proof = await self.get_lineage_proof_for_coin(coin)
+ assert lineage_proof is not None
+ new_spendable_cat = SpendableCAT(
+ coin,
+ self.dao_cat_info.limitations_program_hash,
+ lci.inner_puzzle,
+ solution,
+ limitations_solution=limitations_solution,
+ extra_delta=extra_delta,
+ lineage_proof=lineage_proof,
+ limitations_program_reveal=limitations_program_reveal,
+ )
+ spendable_cat_list.append(new_spendable_cat)
+ spent_coins.append(coin)
+
+ cat_spend_bundle = unsigned_spend_bundle_for_spendable_cats(CAT_MOD, spendable_cat_list)
+ spend_bundle: SpendBundle = await self.wallet_state_manager.sign_transaction(cat_spend_bundle.coin_spends)
+
+ if fee > 0: # pragma: no cover
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(
+ fee,
+ tx_config,
+ )
+ assert chia_tx.spend_bundle is not None
+ full_spend = SpendBundle.aggregate([spend_bundle, chia_tx.spend_bundle])
+ else:
+ full_spend = spend_bundle
+
+ record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=new_inner_puzhash,
+ amount=uint64(total_amt),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=full_spend,
+ additions=full_spend.additions(),
+ removals=full_spend.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+
+ # TODO: Hack to just drop coins from locked list. Need to catch this event in WSM to
+ # check if we're adding CATs from our DAO CAT wallet and update the locked coin list
+ # accordingly
+ new_locked_coins = [x for x in self.dao_cat_info.locked_coins if x.coin not in spent_coins]
+ dao_cat_info: DAOCATInfo = DAOCATInfo(
+ self.dao_cat_info.dao_wallet_id,
+ self.dao_cat_info.free_cat_wallet_id,
+ self.dao_cat_info.limitations_program_hash,
+ self.dao_cat_info.my_tail,
+ new_locked_coins,
+ )
+ await self.save_info(dao_cat_info)
+ return record
+
+ async def remove_active_proposal(
+ self, proposal_id_list: List[bytes32], tx_config: TXConfig, fee: uint64 = uint64(0)
+ ) -> SpendBundle:
+ locked_coins: List[Tuple[LockedCoinInfo, List[bytes32]]] = []
+ for lci in self.dao_cat_info.locked_coins:
+ my_finished_proposals = []
+ for active_vote in lci.active_votes:
+ if active_vote in proposal_id_list:
+ my_finished_proposals.append(active_vote)
+ if my_finished_proposals:
+ locked_coins.append((lci, my_finished_proposals))
+ extra_delta, limitations_solution = 0, Program.to([])
+ limitations_program_reveal = Program.to([])
+ spendable_cat_list = []
+
+ for lci_proposals_tuple in locked_coins:
+ proposal_innerpuzhashes = []
+ coin = lci_proposals_tuple[0].coin
+ lci = lci_proposals_tuple[0]
+ proposals = lci_proposals_tuple[1]
+ for proposal_id in proposals:
+ INNERPUZ = get_finished_state_inner_puzzle(proposal_id)
+ proposal_innerpuzhashes.append(INNERPUZ)
+ # new_innerpuzzle = await cat_wallet.get_new_inner_puzzle()
+ # my_id ; if my_id is 0 we do the return to return_address (exit voting mode) spend case
+ # inner_solution
+ # my_amount
+ # new_proposal_vote_id_or_removal_id ; if we're exiting fully, set this to 0
+ # proposal_curry_vals
+ # vote_info
+ # vote_amount
+ # my_puzhash
+ solution = Program.to(
+ [
+ 0,
+ 0,
+ coin.amount,
+ proposals,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ )
+ lineage_proof = await self.get_lineage_proof_for_coin(coin)
+ assert lineage_proof is not None
+ new_spendable_cat = SpendableCAT(
+ coin,
+ self.dao_cat_info.limitations_program_hash,
+ lci.inner_puzzle,
+ solution,
+ limitations_solution=limitations_solution,
+ extra_delta=extra_delta,
+ lineage_proof=lineage_proof,
+ limitations_program_reveal=limitations_program_reveal,
+ )
+ spendable_cat_list.append(new_spendable_cat)
+
+ cat_spend_bundle = unsigned_spend_bundle_for_spendable_cats(CAT_MOD, spendable_cat_list)
+ spend_bundle = await self.wallet_state_manager.sign_transaction(cat_spend_bundle.coin_spends)
+
+ if fee > 0: # pragma: no cover
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(fee, tx_config=tx_config)
+ assert chia_tx.spend_bundle is not None
+ full_spend = SpendBundle.aggregate([spend_bundle, chia_tx.spend_bundle])
+ else:
+ full_spend = spend_bundle
+
+ return full_spend
+
+ def get_asset_id(self) -> str:
+ return bytes(self.dao_cat_info.limitations_program_hash).hex()
+
+ async def get_new_inner_hash(self, tx_config: TXConfig) -> bytes32:
+ puzzle = await self.get_new_inner_puzzle(tx_config)
+ return puzzle.get_tree_hash()
+
+ async def get_new_inner_puzzle(self, tx_config: TXConfig) -> Program:
+ return await self.standard_wallet.get_puzzle(new=not tx_config.reuse_puzhash)
+
+ async def get_new_puzzle(self) -> Program:
+ record = await self.wallet_state_manager.get_unused_derivation_record(self.id())
+ inner_puzzle = self.standard_wallet.puzzle_for_pk(record.pubkey)
+ puzzle = get_lockup_puzzle(
+ self.dao_cat_info.limitations_program_hash,
+ [],
+ inner_puzzle,
+ )
+ cat_puzzle: Program = construct_cat_puzzle(CAT_MOD, self.dao_cat_info.limitations_program_hash, puzzle)
+ await self.wallet_state_manager.add_interested_puzzle_hashes([puzzle.get_tree_hash()], [self.id()])
+ await self.wallet_state_manager.add_interested_puzzle_hashes([cat_puzzle.get_tree_hash()], [self.id()])
+ return puzzle
+
+ async def get_new_puzzlehash(self) -> bytes32:
+ puzzle = await self.get_new_puzzle()
+ return puzzle.get_tree_hash()
+
+ def puzzle_for_pk(self, pubkey: G1Element) -> Program:
+ inner_puzzle = self.standard_wallet.puzzle_for_pk(pubkey)
+ puzzle = get_lockup_puzzle(
+ self.dao_cat_info.limitations_program_hash,
+ [],
+ inner_puzzle,
+ )
+ cat_puzzle: Program = construct_cat_puzzle(CAT_MOD, self.dao_cat_info.limitations_program_hash, puzzle)
+ return cat_puzzle
+
+ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
+ puzzle = self.puzzle_for_pk(pubkey)
+ return puzzle.get_tree_hash()
+
+ def require_derivation_paths(self) -> bool:
+ return True
+
+ async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool:
+ raise NotImplementedError("Method not implemented for DAO CAT Wallet") # pragma: no cover
+
+ async def get_spendable_balance(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ return uint128(0)
+
+ async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ amount = 0
+ for coin in self.dao_cat_info.locked_coins:
+ amount += coin.coin.amount
+ return uint128(amount)
+
+ async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ return uint128(0)
+
+ async def get_pending_change_balance(self) -> uint64:
+ return uint64(0)
+
+ async def select_coins(
+ self,
+ amount: uint64,
+ coin_selection_config: CoinSelectionConfig,
+ ) -> Set[Coin]:
+ return set()
+
+ async def get_max_send_amount(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ return uint128(0)
+
+ async def get_votable_balance(
+ self,
+ proposal_id: Optional[bytes32] = None,
+ include_free_cats: bool = True,
+ ) -> uint64:
+ balance = 0
+ for coin in self.dao_cat_info.locked_coins:
+ if proposal_id is not None:
+ compatible = True
+ for active_vote in coin.active_votes:
+ if active_vote == proposal_id:
+ compatible = False
+ break
+ if compatible:
+ balance += coin.coin.amount
+ else:
+ balance += coin.coin.amount
+ if include_free_cats:
+ cat_wallet = self.wallet_state_manager.wallets[self.dao_cat_info.free_cat_wallet_id]
+ cat_balance = await cat_wallet.get_spendable_balance()
+ balance += cat_balance
+ return uint64(balance)
+
+ async def save_info(self, dao_cat_info: DAOCATInfo) -> None:
+ self.dao_cat_info = dao_cat_info
+ current_info = self.wallet_info
+ data_str = bytes(dao_cat_info).hex()
+ wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
+ self.wallet_info = wallet_info
+ await self.wallet_state_manager.user_store.update_wallet(wallet_info)
+
+ def get_name(self) -> str:
+ return self.wallet_info.name
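Editorial sketch (not part of the patch): the coin-splitting rule used by create_vote_spend above, isolated as a small dependency-free function. Each selected locked coin contributes either its whole amount or just enough to reach the requested vote size, with the remainder kept as change under the coin's existing lockup puzzle.

    from typing import List, Tuple

    def split_vote_amounts(coin_amounts: List[int], amount: int) -> List[Tuple[int, int]]:
        """Return (vote_amount, change) for each selected coin, in selection order."""
        running_sum = 0
        out: List[Tuple[int, int]] = []
        for coin_amount in coin_amounts:
            if running_sum + coin_amount <= amount:
                out.append((coin_amount, 0))            # whole coin goes toward the vote
            else:
                vote = amount - running_sum
                out.append((vote, coin_amount - vote))  # partial vote plus change
            running_sum += coin_amount
        return out

    assert split_vote_amounts([600, 600], 1000) == [(600, 0), (400, 200)]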
diff --git a/chia/wallet/dao_wallet/dao_info.py b/chia/wallet/dao_wallet/dao_info.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/dao_wallet/dao_info.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import List, Optional, Tuple
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.ints import uint32, uint64
+from chia.util.streamable import Streamable, streamable
+from chia.wallet.lineage_proof import LineageProof
+
+
+@streamable
+@dataclass(frozen=True)
+class ProposalInfo(Streamable):
+ proposal_id: bytes32 # this is launcher_id
+ inner_puzzle: Program
+ amount_voted: uint64
+ yes_votes: uint64
+ current_coin: Coin
+ current_innerpuz: Optional[Program]
+ timer_coin: Optional[Coin] # if this is None then the proposal has finished
+ singleton_block_height: uint32 # Block height that current proposal singleton coin was created in
+ passed: Optional[bool]
+ closed: Optional[bool]
+
+
+@streamable
+@dataclass(frozen=True)
+class DAOInfo(Streamable):
+ treasury_id: bytes32
+ cat_wallet_id: uint32
+ dao_cat_wallet_id: uint32
+ proposals_list: List[ProposalInfo]
+ parent_info: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof}
+ current_treasury_coin: Optional[Coin]
+ current_treasury_innerpuz: Optional[Program]
+ singleton_block_height: uint32 # the block height that the current treasury singleton was created in
+ filter_below_vote_amount: uint64 # we ignore proposals with fewer votes than this - defaults to 1
+ assets: List[Optional[bytes32]]
+ current_height: uint64
+
+
+@streamable
+@dataclass(frozen=True)
+class DAORules(Streamable):
+ proposal_timelock: uint64
+ soft_close_length: uint64
+ attendance_required: uint64
+ pass_percentage: uint64
+ self_destruct_length: uint64
+ oracle_spend_delay: uint64
+ proposal_minimum_amount: uint64
+
+
+class ProposalType(Enum):
+ SPEND = "s"
+ UPDATE = "u"
+ MINT = "m"
diff --git a/chia/wallet/dao_wallet/dao_utils.py b/chia/wallet/dao_wallet/dao_utils.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/dao_wallet/dao_utils.py
@@ -0,0 +1,809 @@
+from __future__ import annotations
+
+import logging
+from itertools import chain
+from typing import Iterator, List, Optional, Tuple
+
+from clvm.EvalError import EvalError
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.ints import uint64
+from chia.wallet.cat_wallet.cat_utils import CAT_MOD, CAT_MOD_HASH, construct_cat_puzzle
+from chia.wallet.dao_wallet.dao_info import DAORules, ProposalType
+from chia.wallet.puzzles.load_clvm import load_clvm
+from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import MOD
+from chia.wallet.singleton import get_singleton_struct_for_id
+from chia.wallet.uncurried_puzzle import UncurriedPuzzle
+
+SINGLETON_MOD: Program = load_clvm("singleton_top_layer_v1_1.clsp")
+SINGLETON_MOD_HASH: bytes32 = SINGLETON_MOD.get_tree_hash()
+SINGLETON_LAUNCHER: Program = load_clvm("singleton_launcher.clsp")
+SINGLETON_LAUNCHER_HASH: bytes32 = SINGLETON_LAUNCHER.get_tree_hash()
+DAO_LOCKUP_MOD: Program = load_clvm("dao_lockup.clsp")
+DAO_LOCKUP_MOD_HASH: bytes32 = DAO_LOCKUP_MOD.get_tree_hash()
+DAO_PROPOSAL_TIMER_MOD: Program = load_clvm("dao_proposal_timer.clsp")
+DAO_PROPOSAL_TIMER_MOD_HASH: bytes32 = DAO_PROPOSAL_TIMER_MOD.get_tree_hash()
+DAO_PROPOSAL_MOD: Program = load_clvm("dao_proposal.clsp")
+DAO_PROPOSAL_MOD_HASH: bytes32 = DAO_PROPOSAL_MOD.get_tree_hash()
+DAO_PROPOSAL_VALIDATOR_MOD: Program = load_clvm("dao_proposal_validator.clsp")
+DAO_PROPOSAL_VALIDATOR_MOD_HASH: bytes32 = DAO_PROPOSAL_VALIDATOR_MOD.get_tree_hash()
+DAO_TREASURY_MOD: Program = load_clvm("dao_treasury.clsp")
+DAO_TREASURY_MOD_HASH: bytes32 = DAO_TREASURY_MOD.get_tree_hash()
+SPEND_P2_SINGLETON_MOD: Program = load_clvm("dao_spend_p2_singleton_v2.clsp")
+SPEND_P2_SINGLETON_MOD_HASH: bytes32 = SPEND_P2_SINGLETON_MOD.get_tree_hash()
+DAO_FINISHED_STATE: Program = load_clvm("dao_finished_state.clsp")
+DAO_FINISHED_STATE_HASH: bytes32 = DAO_FINISHED_STATE.get_tree_hash()
+DAO_CAT_TAIL: Program = load_clvm(
+ "genesis_by_coin_id_or_singleton.clsp", package_or_requirement="chia.wallet.cat_wallet.puzzles"
+)
+DAO_CAT_TAIL_HASH: bytes32 = DAO_CAT_TAIL.get_tree_hash()
+DAO_CAT_LAUNCHER: Program = load_clvm("dao_cat_launcher.clsp")
+P2_SINGLETON_MOD: Program = load_clvm("p2_singleton_via_delegated_puzzle.clsp")
+P2_SINGLETON_MOD_HASH: bytes32 = P2_SINGLETON_MOD.get_tree_hash()
+DAO_UPDATE_PROPOSAL_MOD: Program = load_clvm("dao_update_proposal.clsp")
+DAO_UPDATE_PROPOSAL_MOD_HASH: bytes32 = DAO_UPDATE_PROPOSAL_MOD.get_tree_hash()
+DAO_CAT_EVE: Program = load_clvm("dao_cat_eve.clsp")
+P2_SINGLETON_AGGREGATOR_MOD: Program = load_clvm("p2_singleton_aggregator.clsp")
+
+log = logging.getLogger(__name__)
+
+
+def create_cat_launcher_for_singleton_id(id: bytes32) -> Program:
+ singleton_struct = get_singleton_struct_for_id(id)
+ return DAO_CAT_LAUNCHER.curry(singleton_struct)
+
+
+def curry_cat_eve(next_puzzle_hash: bytes32) -> Program:
+ return DAO_CAT_EVE.curry(next_puzzle_hash)
+
+
+def get_treasury_puzzle(dao_rules: DAORules, treasury_id: bytes32, cat_tail_hash: bytes32) -> Program:
+ singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+ lockup_puzzle: Program = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ cat_tail_hash,
+ )
+ proposal_self_hash = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ lockup_puzzle.get_tree_hash(),
+ cat_tail_hash,
+ treasury_id,
+ ).get_tree_hash()
+
+ proposal_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ singleton_struct,
+ proposal_self_hash,
+ dao_rules.proposal_minimum_amount,
+ get_p2_singleton_puzzle(
+ treasury_id
+ ).get_tree_hash(), # TODO: let people set this later - for now a hidden feature
+ )
+ puzzle = DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ dao_rules.proposal_timelock,
+ dao_rules.soft_close_length,
+ dao_rules.attendance_required,
+ dao_rules.pass_percentage,
+ dao_rules.self_destruct_length,
+ dao_rules.oracle_spend_delay,
+ )
+ return puzzle
+
+
+def get_proposal_validator(treasury_puz: Program, proposal_minimum_amount: uint64) -> Program:
+ _, uncurried_args = treasury_puz.uncurry()
+ validator: Program = uncurried_args.rest().first()
+ validator_args = validator.uncurry()[1]
+ (
+ singleton_struct,
+ proposal_self_hash,
+ _,
+ p2_puzhash,
+ ) = validator_args.as_iter()
+ proposal_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ singleton_struct,
+ proposal_self_hash,
+ proposal_minimum_amount,
+ p2_puzhash,
+ )
+ return proposal_validator
+
+
+def get_update_proposal_puzzle(dao_rules: DAORules, proposal_validator: Program) -> Program:
+ validator_args = uncurry_proposal_validator(proposal_validator)
+ (
+ singleton_struct,
+ proposal_self_hash,
+ _,
+ proposal_excess_puzhash,
+ ) = validator_args.as_iter()
+ update_proposal = DAO_UPDATE_PROPOSAL_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ DAO_PROPOSAL_VALIDATOR_MOD_HASH,
+ singleton_struct,
+ proposal_self_hash,
+ dao_rules.proposal_minimum_amount,
+ proposal_excess_puzhash,
+ dao_rules.proposal_timelock,
+ dao_rules.soft_close_length,
+ dao_rules.attendance_required,
+ dao_rules.pass_percentage,
+ dao_rules.self_destruct_length,
+ dao_rules.oracle_spend_delay,
+ )
+ return update_proposal
+
+
+def get_dao_rules_from_update_proposal(puzzle: Program) -> DAORules:
+ mod, curried_args = puzzle.uncurry()
+ if mod != DAO_UPDATE_PROPOSAL_MOD: # pragma: no cover
+ raise ValueError("Not an update proposal.")
+ (
+ _,
+ _,
+ _,
+ _,
+ proposal_minimum_amount,
+ _,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ pass_percentage,
+ self_destruct_length,
+ oracle_spend_delay,
+ ) = curried_args.as_iter()
+ dao_rules = DAORules(
+ proposal_timelock.as_int(),
+ soft_close_length.as_int(),
+ attendance_required.as_int(),
+ pass_percentage.as_int(),
+ self_destruct_length.as_int(),
+ oracle_spend_delay.as_int(),
+ proposal_minimum_amount.as_int(),
+ )
+ return dao_rules
+
+
+def get_spend_p2_singleton_puzzle(
+ treasury_id: bytes32, xch_conditions: Optional[List], asset_conditions: Optional[List[Tuple]] # type: ignore
+) -> Program:
+ # TODO: typecheck get_spend_p2_singleton_puzzle arguments
+ # TODO: add tests for get_spend_p2_singleton_puzzle: pass xch_conditions as Puzzle, List and ConditionWithArgs
+ #
+
+ # CAT_MOD_HASH
+ # CONDITIONS ; XCH conditions, to be generated by the treasury
+ # LIST_OF_TAILHASH_CONDITIONS ; the delegated puzzlehash must be curried in to the proposal.
+ # ; Puzzlehash is only run in the last coin for that asset
+ # ; ((TAIL_HASH CONDITIONS) (TAIL_HASH CONDITIONS)... )
+ # P2_SINGLETON_VIA_DELEGATED_PUZZLE_PUZHASH
+ treasury_struct = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+ puzzle: Program = SPEND_P2_SINGLETON_MOD.curry(
+ treasury_struct,
+ CAT_MOD_HASH,
+ xch_conditions,
+ asset_conditions,
+ P2_SINGLETON_MOD.curry(treasury_struct, P2_SINGLETON_AGGREGATOR_MOD).get_tree_hash(),
+ )
+ return puzzle
+
+
+def get_p2_singleton_puzzle(treasury_id: bytes32, asset_id: Optional[bytes32] = None) -> Program:
+ singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+ inner_puzzle = P2_SINGLETON_MOD.curry(singleton_struct, P2_SINGLETON_AGGREGATOR_MOD)
+ if asset_id:
+ # CAT
+ puzzle = CAT_MOD.curry(CAT_MOD_HASH, asset_id, inner_puzzle)
+ return Program(puzzle)
+ else:
+ # XCH
+ return inner_puzzle
+
+
+def get_p2_singleton_puzhash(treasury_id: bytes32, asset_id: Optional[bytes32] = None) -> bytes32:
+ puz = get_p2_singleton_puzzle(treasury_id, asset_id)
+ assert puz is not None
+ return puz.get_tree_hash()
+
+
+def get_lockup_puzzle(
+ cat_tail_hash: bytes32, previous_votes_list: List[Optional[bytes32]], innerpuz: Optional[Program]
+) -> Program:
+ self_hash: Program = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ cat_tail_hash,
+ )
+ puzzle = self_hash.curry(
+ self_hash.get_tree_hash(),
+ previous_votes_list, # TODO: maybe format check this in this function
+ innerpuz,
+ )
+ return puzzle
+
+
+def add_proposal_to_active_list(
+ lockup_puzzle: Program, proposal_id: bytes32, inner_puzzle: Optional[Program] = None
+) -> Program:
+ curried_args, c_a = uncurry_lockup(lockup_puzzle)
+ (
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_PUZHASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ ) = c_a.as_iter()
+ (SELF_HASH, ACTIVE_VOTES, INNERPUZ) = curried_args.as_iter()
+ new_active_votes = Program.to(proposal_id).cons(ACTIVE_VOTES) # (c proposal_id ACTIVE_VOTES)
+ if inner_puzzle is None:
+ inner_puzzle = INNERPUZ
+ return get_lockup_puzzle(CAT_TAIL_HASH, new_active_votes, inner_puzzle)
+
+
+def get_active_votes_from_lockup_puzzle(lockup_puzzle: Program) -> Program:
+ curried_args, c_a = uncurry_lockup(lockup_puzzle)
+ (
+ _SINGLETON_MOD_HASH,
+ _SINGLETON_LAUNCHER_HASH,
+ _DAO_FINISHED_STATE_HASH,
+ _CAT_MOD_HASH,
+ _CAT_TAIL_HASH,
+ ) = list(c_a.as_iter())
+ (
+ self_hash,
+ ACTIVE_VOTES,
+ _INNERPUZ,
+ ) = curried_args.as_iter()
+ return Program(ACTIVE_VOTES)
+
+
+def get_innerpuz_from_lockup_puzzle(lockup_puzzle: Program) -> Optional[Program]:
+ try:
+ curried_args, c_a = uncurry_lockup(lockup_puzzle)
+ except Exception as e: # pragma: no cover
+ log.debug("Could not uncurry inner puzzle from lockup: %s", e)
+ return None
+ (
+ _SINGLETON_MOD_HASH,
+ _SINGLETON_LAUNCHER_HASH,
+ _DAO_FINISHED_STATE_HASH,
+ _CAT_MOD_HASH,
+ _CAT_TAIL_HASH,
+ ) = list(c_a.as_iter())
+ (
+ self_hash,
+ _ACTIVE_VOTES,
+ INNERPUZ,
+ ) = list(curried_args.as_iter())
+ return Program(INNERPUZ)
+
+
+def get_proposal_puzzle(
+ *,
+ proposal_id: bytes32,
+ cat_tail_hash: bytes32,
+ treasury_id: bytes32,
+ votes_sum: uint64,
+ total_votes: uint64,
+ proposed_puzzle_hash: bytes32,
+) -> Program:
+ """
+    Curry the DAO proposal puzzle for a given proposal singleton.
+    proposal_id is the proposal's launcher id, votes_sum is the current yes-vote count,
+    total_votes is the total number of votes cast, and proposed_puzzle_hash is the hash
+    of the puzzle that runs if the proposal passes.
+ """
+ lockup_puzzle: Program = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ cat_tail_hash,
+ )
+ # SINGLETON_STRUCT ; (SINGLETON_MOD_HASH (SINGLETON_ID . LAUNCHER_PUZZLE_HASH))
+ # PROPOSAL_TIMER_MOD_HASH ; proposal timer needs to know which proposal created it, AND
+ # CAT_MOD_HASH
+ # DAO_FINISHED_STATE_MOD_HASH
+ # TREASURY_MOD_HASH
+ # LOCKUP_SELF_HASH
+ # CAT_TAIL_HASH
+ # TREASURY_ID
+ # ; second hash
+ # SELF_HASH
+ # PROPOSED_PUZ_HASH ; this is what runs if this proposal is successful - the inner puzzle of this proposal
+ # YES_VOTES ; yes votes are +1, no votes don't tally - we compare yes_votes/total_votes at the end
+ # TOTAL_VOTES ; how many people responded
+ curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ lockup_puzzle.get_tree_hash(),
+ cat_tail_hash,
+ treasury_id,
+ )
+ puzzle = curry_one.curry(
+ curry_one.get_tree_hash(),
+ proposal_id,
+ proposed_puzzle_hash,
+ votes_sum,
+ total_votes,
+ )
+ return puzzle
+
+
+def get_proposal_timer_puzzle(
+ cat_tail_hash: bytes32,
+ proposal_id: bytes32,
+ treasury_id: bytes32,
+) -> Program:
+ parent_singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (proposal_id, SINGLETON_LAUNCHER_HASH)))
+ lockup_puzzle: Program = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ cat_tail_hash,
+ )
+ PROPOSAL_SELF_HASH = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ lockup_puzzle.get_tree_hash(),
+ cat_tail_hash,
+ treasury_id,
+ ).get_tree_hash()
+
+ puzzle: Program = DAO_PROPOSAL_TIMER_MOD.curry(
+ PROPOSAL_SELF_HASH,
+ parent_singleton_struct,
+ )
+ return puzzle
+
+
+def get_treasury_rules_from_puzzle(puzzle_reveal: Optional[Program]) -> DAORules:
+ assert isinstance(puzzle_reveal, Program)
+ curried_args = uncurry_treasury(puzzle_reveal)
+ (
+ _DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ pass_percentage,
+ self_destruct_length,
+ oracle_spend_delay,
+ ) = curried_args
+ curried_args = uncurry_proposal_validator(proposal_validator)
+ (
+ SINGLETON_STRUCT,
+ PROPOSAL_SELF_HASH,
+ PROPOSAL_MINIMUM_AMOUNT,
+ PAYOUT_PUZHASH,
+ ) = curried_args.as_iter()
+ return DAORules(
+ uint64(proposal_timelock.as_int()),
+ uint64(soft_close_length.as_int()),
+ uint64(attendance_required.as_int()),
+ uint64(pass_percentage.as_int()),
+ uint64(self_destruct_length.as_int()),
+ uint64(oracle_spend_delay.as_int()),
+ uint64(PROPOSAL_MINIMUM_AMOUNT.as_int()),
+ )
+
+
+# This takes the treasury puzzle and treasury solution, not the full puzzle and full solution
+# This also returns the treasury puzzle and not the full puzzle
+def get_new_puzzle_from_treasury_solution(puzzle_reveal: Program, solution: Program) -> Optional[Program]:
+ if solution.rest().rest().first() != Program.to(0):
+ # Proposal Spend
+ mod, curried_args = solution.at("rrf").uncurry()
+ if mod == DAO_UPDATE_PROPOSAL_MOD:
+ (
+ DAO_TREASURY_MOD_HASH,
+ DAO_VALIDATOR_MOD_HASH,
+ TREASURY_SINGLETON_STRUCT,
+ PROPOSAL_SELF_HASH,
+ proposal_minimum_amount,
+ PROPOSAL_EXCESS_PAYOUT_PUZ_HASH,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ pass_percentage,
+ self_destruct_length,
+ oracle_spend_delay,
+ ) = curried_args.as_iter()
+ new_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ TREASURY_SINGLETON_STRUCT, PROPOSAL_SELF_HASH, proposal_minimum_amount, PROPOSAL_EXCESS_PAYOUT_PUZ_HASH
+ )
+ return DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ new_validator,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ pass_percentage,
+ self_destruct_length,
+ oracle_spend_delay,
+ )
+ else:
+ return puzzle_reveal
+ else:
+ # Oracle Spend - treasury is unchanged
+ return puzzle_reveal
+
+
+# This takes the proposal puzzle and proposal solution, not the full puzzle and full solution
+# This also returns the proposal puzzle and not the full puzzle
+def get_new_puzzle_from_proposal_solution(puzzle_reveal: Program, solution: Program) -> Optional[Program]:
+ # Check if soft_close_length is in solution. If not, then add votes, otherwise close proposal
+ if len(solution.as_python()) == 1:
+ return puzzle_reveal # we're finished, shortcut this function
+
+ if solution.at("rrrrrrf") == Program.to(0):
+ c_a, curried_args = uncurry_proposal(puzzle_reveal)
+ assert isinstance(curried_args, Program)
+ (
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_PUZHASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ lockup_self_hash,
+ cat_tail_hash,
+ treasury_id,
+ ) = curried_args.as_iter()
+ assert isinstance(c_a, Program)
+ (
+ curry_one,
+ proposal_id,
+ proposed_puzzle_hash,
+ yes_votes,
+ total_votes,
+ ) = c_a.as_iter()
+
+ added_votes = 0
+ for vote_amount in solution.first().as_iter():
+ added_votes += vote_amount.as_int()
+
+ new_total_votes = total_votes.as_int() + added_votes
+
+ if solution.at("rf") == Program.to(0):
+ # Vote Type: NO
+ new_yes_votes = yes_votes.as_int()
+ else:
+ # Vote Type: YES
+ new_yes_votes = yes_votes.as_int() + added_votes
+ return get_proposal_puzzle(
+ proposal_id=proposal_id.as_atom(),
+ cat_tail_hash=cat_tail_hash.as_atom(),
+ treasury_id=treasury_id.as_atom(),
+ votes_sum=new_yes_votes,
+ total_votes=new_total_votes,
+ proposed_puzzle_hash=proposed_puzzle_hash.as_atom(),
+ )
+ else:
+ # we are in the finished state, puzzle is the same as ever
+ mod, currieds = puzzle_reveal.uncurry() # uncurry to self_hash
+ # check if our parent was the last non-finished state
+ if mod.uncurry()[0] == DAO_PROPOSAL_MOD:
+ c_a, curried_args = uncurry_proposal(puzzle_reveal)
+ (
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_PUZHASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ lockup_self_hash,
+ cat_tail_hash,
+ treasury_id,
+ ) = curried_args.as_iter()
+ (
+ curry_one,
+ proposal_id,
+ proposed_puzzle_hash,
+ yes_votes,
+ total_votes,
+ ) = c_a.as_iter()
+ else: # pragma: no cover
+ SINGLETON_STRUCT, dao_finished_hash = currieds.as_iter()
+ proposal_id = SINGLETON_STRUCT.rest().first()
+ return get_finished_state_inner_puzzle(bytes32(proposal_id.as_atom()))
+
+
+def get_finished_state_inner_puzzle(proposal_id: bytes32) -> Program:
+ singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (proposal_id, SINGLETON_LAUNCHER_HASH)))
+ finished_inner_puz: Program = DAO_FINISHED_STATE.curry(singleton_struct, DAO_FINISHED_STATE_HASH)
+ return finished_inner_puz
+
+
+def get_finished_state_puzzle(proposal_id: bytes32) -> Program:
+ return curry_singleton(proposal_id, get_finished_state_inner_puzzle(proposal_id))
+
+
+def get_proposed_puzzle_reveal_from_solution(solution: Program) -> Program:
+ prog = Program.from_bytes(bytes(solution))
+ return prog.at("rrfrrrrrf")
+
+
+def get_asset_id_from_puzzle(puzzle: Program) -> Optional[bytes32]:
+ mod, curried_args = puzzle.uncurry()
+ if mod == MOD: # pragma: no cover
+ return None
+ elif mod == CAT_MOD:
+ return bytes32(curried_args.at("rf").as_atom())
+ elif mod == SINGLETON_MOD: # pragma: no cover
+ return bytes32(curried_args.at("frf").as_atom())
+ else:
+ raise ValueError("DAO received coin with unknown puzzle") # pragma: no cover
+
+
+def uncurry_proposal_validator(proposal_validator_program: Program) -> Program:
+ try:
+ mod, curried_args = proposal_validator_program.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry treasury puzzle: error: %s", e)
+ raise e
+
+ if mod != DAO_PROPOSAL_VALIDATOR_MOD: # pragma: no cover
+ raise ValueError("Not a Treasury mod.")
+ return curried_args
+
+
+def uncurry_treasury(treasury_puzzle: Program) -> List[Program]:
+ try:
+ mod, curried_args = treasury_puzzle.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry treasury puzzle: error: %s", e)
+ raise e
+
+ if mod != DAO_TREASURY_MOD: # pragma: no cover
+ raise ValueError("Not a Treasury mod.")
+ return list(curried_args.as_iter())
+
+
+def uncurry_proposal(proposal_puzzle: Program) -> Tuple[Program, Program]:
+ try:
+ mod, curried_args = proposal_puzzle.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry proposal puzzle: error: %s", e)
+ raise e
+ try:
+ mod, c_a = mod.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry lockup puzzle: error: %s", e)
+ raise e
+ if mod != DAO_PROPOSAL_MOD:
+ raise ValueError("Not a dao proposal mod.")
+ return curried_args, c_a
+
+
+def uncurry_lockup(lockup_puzzle: Program) -> Tuple[Program, Program]:
+ try:
+ mod, curried_args = lockup_puzzle.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry lockup puzzle: error: %s", e)
+ raise e
+ try:
+ mod, c_a = mod.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry lockup puzzle: error: %s", e)
+ raise e
+ if mod != DAO_LOCKUP_MOD:
+ log.debug("Puzzle is not a dao cat lockup mod")
+ return curried_args, c_a
+
+
+# This is the proposed puzzle
+def get_proposal_args(puzzle: Program) -> Tuple[ProposalType, Program]:
+ try:
+ mod, curried_args = puzzle.uncurry()
+ except ValueError as e: # pragma: no cover
+ log.debug("Cannot uncurry spend puzzle: error: %s", e)
+ raise e
+ if mod == SPEND_P2_SINGLETON_MOD:
+ return ProposalType.SPEND, curried_args
+ elif mod == DAO_UPDATE_PROPOSAL_MOD:
+ return ProposalType.UPDATE, curried_args
+ else:
+ raise ValueError("Unrecognised proposal type")
+
+
+def generate_cat_tail(genesis_coin_id: bytes32, treasury_id: bytes32) -> Program:
+ dao_cat_launcher = create_cat_launcher_for_singleton_id(treasury_id).get_tree_hash()
+ puzzle = DAO_CAT_TAIL.curry(genesis_coin_id, dao_cat_launcher)
+ return puzzle
+
+
+def curry_singleton(singleton_id: bytes32, innerpuz: Program) -> Program:
+ singleton_struct = Program.to((SINGLETON_MOD_HASH, (singleton_id, SINGLETON_LAUNCHER_HASH)))
+ return SINGLETON_MOD.curry(singleton_struct, innerpuz)
+
+
+# This is for use in the WalletStateManager to determine the type of coin received
+def match_treasury_puzzle(mod: Program, curried_args: Program) -> Optional[Iterator[Program]]:
+ """
+    Given a puzzle, test if it is a Treasury; if it is, return the curried arguments.
+ :param mod: Puzzle
+ :param curried_args: Puzzle
+ :return: Curried parameters
+ """
+ try:
+ if mod == SINGLETON_MOD:
+ mod, curried_args = curried_args.rest().first().uncurry()
+ if mod == DAO_TREASURY_MOD:
+ return curried_args.first().as_iter() # type: ignore[no-any-return]
+ except ValueError: # pragma: no cover
+ import traceback
+
+ print(f"exception: {traceback.format_exc()}")
+ return None
+
+
+# This is for use in the WalletStateManager to determine the type of coin received
+def match_proposal_puzzle(mod: Program, curried_args: Program) -> Optional[Iterator[Program]]:
+ """
+    Given a puzzle, test if it is a Proposal; if it is, return the curried arguments.
+    :param mod: Puzzle
+    :param curried_args: Puzzle
+ :return: Curried parameters
+ """
+ try:
+ if mod == SINGLETON_MOD:
+ c_a, curried_args = uncurry_proposal(curried_args.rest().first())
+ assert c_a is not None and curried_args is not None
+ ret = chain(c_a.as_iter(), curried_args.as_iter())
+ return ret
+ except ValueError:
+ import traceback
+
+ print(f"exception: {traceback.format_exc()}")
+ return None
+
+
+def match_finished_puzzle(mod: Program, curried_args: Program) -> Optional[Iterator[Program]]:
+ """
+    Given a puzzle, test if it is a finished proposal; if it is, return the curried arguments.
+    :param mod: Puzzle
+    :param curried_args: Puzzle
+ :return: Curried parameters
+ """
+ try:
+ if mod == SINGLETON_MOD:
+ mod, curried_args = curried_args.rest().first().uncurry()
+ if mod == DAO_FINISHED_STATE:
+ return curried_args.as_iter() # type: ignore[no-any-return]
+ except ValueError: # pragma: no cover
+ import traceback
+
+ print(f"exception: {traceback.format_exc()}")
+ return None
+
+
+# This is used in WSM to determine whether we have a dao funding spend
+def match_funding_puzzle(
+ uncurried: UncurriedPuzzle, solution: Program, coin: Coin, dao_ids: List[bytes32] = []
+) -> Optional[bool]:
+ if not dao_ids:
+ return None
+ try:
+ if uncurried.mod == CAT_MOD:
+ conditions = solution.at("frfr").as_iter()
+ elif uncurried.mod == MOD:
+ conditions = solution.at("rfr").as_iter()
+ elif uncurried.mod == SINGLETON_MOD:
+ inner_puz, _ = uncurried.args.at("rf").uncurry()
+ if inner_puz == DAO_TREASURY_MOD:
+ delegated_puz = solution.at("rrfrrf")
+ delegated_mod, delegated_args = delegated_puz.uncurry()
+ if delegated_puz.uncurry()[0] == SPEND_P2_SINGLETON_MOD:
+ if coin.puzzle_hash == delegated_args.at("rrrrf").as_atom(): # pragma: no cover
+ return True
+ return None # pragma: no cover
+ else:
+ return None
+ fund_puzhashes = [get_p2_singleton_puzhash(dao_id) for dao_id in dao_ids]
+ for cond in conditions:
+ if (cond.list_len() == 4) and (cond.first().as_int() == 51):
+ if cond.at("rrrff") in fund_puzhashes:
+ return True
+ except (ValueError, EvalError):
+ import traceback
+
+ print(f"exception: {traceback.format_exc()}")
+ return None
+
+
+def match_dao_cat_puzzle(uncurried: UncurriedPuzzle) -> Optional[Iterator[Program]]:
+ try:
+ if uncurried.mod == CAT_MOD:
+ arg_list = list(uncurried.args.as_iter())
+ inner_puz = get_innerpuz_from_lockup_puzzle(uncurried.args.at("rrf"))
+ if inner_puz is not None:
+ dao_cat_args: Iterator[Program] = Program.to(arg_list).as_iter()
+ return dao_cat_args
+ except ValueError:
+ import traceback
+
+ print(f"exception: {traceback.format_exc()}")
+ return None
+
+
+def generate_simple_proposal_innerpuz(
+ treasury_id: bytes32,
+ recipient_puzhashes: List[bytes32],
+ amounts: List[uint64],
+ asset_types: List[Optional[bytes32]] = [None],
+) -> Program:
+    if not (len(recipient_puzhashes) == len(amounts) == len(asset_types)):  # pragma: no cover
+ raise ValueError("Mismatch in the number of recipients, amounts, or asset types")
+ xch_conds = []
+ cat_conds = []
+ for recipient_puzhash, amount, asset_type in zip(recipient_puzhashes, amounts, asset_types):
+ if asset_type:
+ cat_conds.append([asset_type, [[51, recipient_puzhash, amount]]])
+ else:
+ xch_conds.append([51, recipient_puzhash, amount])
+ puzzle = get_spend_p2_singleton_puzzle(treasury_id, Program.to(xch_conds), Program.to(cat_conds))
+ return puzzle
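+ # Minimal usage sketch (identifiers are placeholders, not part of this module): mixing one
+ # plain-XCH payout and one CAT payout in a single spend proposal. An asset_type of None
+ # selects XCH; a bytes32 tail hash selects that CAT.
+ #
+ #   innerpuz = generate_simple_proposal_innerpuz(
+ #       treasury_id=example_treasury_id,
+ #       recipient_puzhashes=[xch_recipient_ph, cat_recipient_ph],
+ #       amounts=[uint64(1000), uint64(500)],
+ #       asset_types=[None, example_cat_tail_hash],
+ #   )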
+
+
+async def generate_update_proposal_innerpuz(
+ current_treasury_innerpuz: Program,
+ new_dao_rules: DAORules,
+ new_proposal_validator: Optional[Program] = None,
+) -> Program:
+ if not new_proposal_validator:
+ assert isinstance(current_treasury_innerpuz, Program)
+ new_proposal_validator = get_proposal_validator(
+ current_treasury_innerpuz, new_dao_rules.proposal_minimum_amount
+ )
+ return get_update_proposal_puzzle(new_dao_rules, new_proposal_validator)
+
+
+async def generate_mint_proposal_innerpuz(
+ treasury_id: bytes32,
+ cat_tail_hash: bytes32,
+ amount_of_cats_to_create: uint64,
+ cats_new_innerpuzhash: bytes32,
+) -> Program:
+ if amount_of_cats_to_create % 2 == 1: # pragma: no cover
+ raise ValueError("Minting proposals must mint an even number of CATs")
+ cat_launcher = create_cat_launcher_for_singleton_id(treasury_id)
+
+ # cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ # cat_tail_hash = cat_wallet.cat_info.limitations_program_hash
+ eve_puz_hash = curry_cat_eve(cats_new_innerpuzhash)
+ full_puz = construct_cat_puzzle(CAT_MOD, cat_tail_hash, eve_puz_hash)
+ xch_conditions = [
+ [
+ 51,
+ cat_launcher.get_tree_hash(),
+ uint64(amount_of_cats_to_create),
+ [cats_new_innerpuzhash],
+ ], # create cat_launcher coin
+ [
+ 60,
+ Program.to([ProposalType.MINT.value, full_puz.get_tree_hash()]).get_tree_hash(),
+ ], # make an announcement for the launcher to assert
+ ]
+ puzzle = get_spend_p2_singleton_puzzle(treasury_id, Program.to(xch_conditions), [])
+ return puzzle
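+ # Usage sketch (identifiers are placeholders): a mint proposal inner puzzle needs the
+ # treasury it targets, the voting CAT tail hash, an even number of CATs to mint, and the
+ # inner puzzle hash the newly minted CATs are sent to.
+ #
+ #   innerpuz = await generate_mint_proposal_innerpuz(
+ #       treasury_id=example_treasury_id,
+ #       cat_tail_hash=example_cat_tail_hash,
+ #       amount_of_cats_to_create=uint64(10000),
+ #       cats_new_innerpuzhash=example_recipient_innerpuzhash,
+ #   )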
diff --git a/chia/wallet/dao_wallet/dao_wallet.py b/chia/wallet/dao_wallet/dao_wallet.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/dao_wallet/dao_wallet.py
@@ -0,0 +1,2155 @@
+from __future__ import annotations
+
+import copy
+import dataclasses
+import json
+import logging
+import re
+import time
+from secrets import token_bytes
+from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, Union, cast
+
+from blspy import AugSchemeMPL, G1Element, G2Element
+from clvm.casts import int_from_bytes
+
+import chia.wallet.singleton
+from chia.full_node.full_node_api import FullNodeAPI
+from chia.protocols.wallet_protocol import CoinState, RequestBlockHeader, RespondBlockHeader
+from chia.server.ws_connection import WSChiaConnection
+from chia.types.announcement import Announcement
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.condition_opcodes import ConditionOpcode
+from chia.types.spend_bundle import SpendBundle
+from chia.util.ints import uint32, uint64, uint128
+from chia.wallet import singleton
+from chia.wallet.cat_wallet.cat_utils import CAT_MOD, SpendableCAT, construct_cat_puzzle
+from chia.wallet.cat_wallet.cat_utils import get_innerpuzzle_from_puzzle as get_innerpuzzle_from_cat_puzzle
+from chia.wallet.cat_wallet.cat_utils import unsigned_spend_bundle_for_spendable_cats
+from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.cat_wallet.dao_cat_wallet import DAOCATWallet
+from chia.wallet.coin_selection import select_coins
+from chia.wallet.conditions import Condition, parse_timelock_info
+from chia.wallet.dao_wallet.dao_info import DAOInfo, DAORules, ProposalInfo, ProposalType
+from chia.wallet.dao_wallet.dao_utils import (
+ DAO_FINISHED_STATE,
+ DAO_PROPOSAL_MOD,
+ DAO_TREASURY_MOD,
+ SINGLETON_LAUNCHER,
+ create_cat_launcher_for_singleton_id,
+ curry_cat_eve,
+ curry_singleton,
+ generate_cat_tail,
+ get_active_votes_from_lockup_puzzle,
+ get_asset_id_from_puzzle,
+ get_dao_rules_from_update_proposal,
+ get_finished_state_inner_puzzle,
+ get_finished_state_puzzle,
+ get_innerpuz_from_lockup_puzzle,
+ get_new_puzzle_from_proposal_solution,
+ get_new_puzzle_from_treasury_solution,
+ get_p2_singleton_puzhash,
+ get_p2_singleton_puzzle,
+ get_proposal_args,
+ get_proposal_puzzle,
+ get_proposal_timer_puzzle,
+ get_proposed_puzzle_reveal_from_solution,
+ get_treasury_puzzle,
+ get_treasury_rules_from_puzzle,
+ match_funding_puzzle,
+ uncurry_proposal,
+ uncurry_treasury,
+)
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.singleton import (
+ get_inner_puzzle_from_singleton,
+ get_most_recent_singleton_coin_from_coin_spend,
+ get_singleton_id_from_puzzle,
+ get_singleton_struct_for_id,
+)
+from chia.wallet.singleton_record import SingletonRecord
+from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.uncurried_puzzle import uncurry_puzzle
+from chia.wallet.util.transaction_type import TransactionType
+from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, CoinSelectionConfig, TXConfig
+from chia.wallet.util.wallet_sync_utils import fetch_coin_spend
+from chia.wallet.util.wallet_types import WalletType
+from chia.wallet.wallet import Wallet
+from chia.wallet.wallet_coin_record import WalletCoinRecord
+from chia.wallet.wallet_info import WalletInfo
+
+
+class DAOWallet:
+ """
+ This is a wallet in the sense that it conforms to the interface needed by WalletStateManager.
+ It is not a user-facing wallet. A user cannot spend or receive XCH through a wallet of this type.
+
+ Wallets of type CAT and DAO_CAT are the user-facing wallets which hold the voting tokens a user
+ owns. The DAO Wallet is used for state-tracking of the Treasury Singleton and its associated
+ Proposals.
+
+ State change spends (spends this user creates, either from DAOWallet or DAOCATWallet):
+ * Create a proposal
+ * Add more votes to a proposal
+ * Lock / Unlock voting tokens
+ * Collect finished state of a Proposal - spend to read the oracle result and get our CAT coins back
+ * Anyone can send money to the Treasury, whether in possession of a voting CAT or not
+
+ Incoming spends we listen for:
+ * Update Treasury state if treasury is spent
+ * Hear about a finished proposal
+ * Hear about a new proposal -- check interest threshold (how many votes)
+ * Get Updated Proposal Data
+ """
+
+ if TYPE_CHECKING:
+ from chia.wallet.wallet_protocol import WalletProtocol
+
+ _protocol_check: ClassVar[WalletProtocol[DAOInfo]] = cast("DAOWallet", None)
+
+ wallet_state_manager: Any
+ log: logging.Logger
+ wallet_info: WalletInfo
+ dao_info: DAOInfo
+ dao_rules: DAORules
+ standard_wallet: Wallet
+ wallet_id: uint32
+
+ @staticmethod
+ async def create_new_dao_and_wallet(
+ wallet_state_manager: Any,
+ wallet: Wallet,
+ amount_of_cats: uint64,
+ dao_rules: DAORules,
+ tx_config: TXConfig,
+ filter_amount: uint64 = uint64(1),
+ name: Optional[str] = None,
+ fee: uint64 = uint64(0),
+ fee_for_cat: uint64 = uint64(0),
+ ) -> DAOWallet:
+ """
+ Create a brand new DAO wallet
+ This must be called under the wallet state manager lock
+ :param wallet_state_manager: Wallet state manager
+ :param wallet: Standard wallet
+ :param amount_of_cats: Initial amount of voting CATs
+ :param dao_rules: The rules which govern the DAO
+ :param filter_amount: Min votes to see proposal (user defined)
+ :param name: Wallet name
+ :param fee: transaction fee
+ :param fee_for_cat: transaction fee for creating the CATs
+ :return: DAO wallet
+ """
+
+ self = DAOWallet()
+ self.wallet_state_manager = wallet_state_manager
+ if name is None:
+ name = self.generate_wallet_name()
+
+ self.standard_wallet = wallet
+ self.log = logging.getLogger(name if name else __name__)
+ std_wallet_id = self.standard_wallet.wallet_id
+ bal = await wallet_state_manager.get_confirmed_balance_for_wallet(std_wallet_id)
+ if amount_of_cats > bal:
+ raise ValueError(f"Your balance of {bal} mojos is not enough to create {amount_of_cats} CATs")
+
+ self.dao_info = DAOInfo(
+ treasury_id=bytes32([0] * 32),
+ cat_wallet_id=uint32(0),
+ dao_cat_wallet_id=uint32(0),
+ proposals_list=[],
+ parent_info=[],
+ current_treasury_coin=None,
+ current_treasury_innerpuz=None,
+ singleton_block_height=uint32(0),
+ filter_below_vote_amount=filter_amount,
+ assets=[],
+ current_height=uint64(0),
+ )
+ self.dao_rules = dao_rules
+ info_as_string = json.dumps(self.dao_info.to_json_dict())
+ self.wallet_info = await wallet_state_manager.user_store.create_wallet(
+ name, WalletType.DAO.value, info_as_string
+ )
+ self.wallet_id = self.wallet_info.id
+ std_wallet_id = self.standard_wallet.wallet_id
+
+ try:
+ await self.generate_new_dao(
+ amount_of_cats,
+ tx_config,
+ fee=fee,
+ fee_for_cat=fee_for_cat,
+ )
+ except Exception as e_info: # pragma: no cover
+ await wallet_state_manager.user_store.delete_wallet(self.id())
+ self.log.exception(f"Failed to create dao wallet: {e_info}")
+ raise
+
+ await self.wallet_state_manager.add_new_wallet(self)
+
+ # Now that the dao wallet is created, we can create the dao_cat wallet
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ cat_tail = cat_wallet.cat_info.limitations_program_hash
+ new_dao_cat_wallet = await DAOCATWallet.get_or_create_wallet_for_cat(
+ self.wallet_state_manager, self.standard_wallet, cat_tail.hex()
+ )
+ dao_cat_wallet_id = new_dao_cat_wallet.wallet_info.id
+ dao_info = dataclasses.replace(
+ self.dao_info, cat_wallet_id=cat_wallet.id(), dao_cat_wallet_id=dao_cat_wallet_id
+ )
+ await self.save_info(dao_info)
+
+ return self
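+ # Hedged call sketch for the constructor above (wsm, main_wallet and rules are placeholders
+ # for a WalletStateManager, a standard Wallet and a DAORules instance the caller already has):
+ #
+ #   async with wsm.lock:
+ #       dao_wallet = await DAOWallet.create_new_dao_and_wallet(
+ #           wsm, main_wallet, uint64(10000), rules, DEFAULT_TX_CONFIG, fee=uint64(10)
+ #       )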
+
+ @staticmethod
+ async def create_new_dao_wallet_for_existing_dao(
+ wallet_state_manager: Any,
+ main_wallet: Wallet,
+ treasury_id: bytes32,
+ filter_amount: uint64 = uint64(1),
+ name: Optional[str] = None,
+ ) -> DAOWallet:
+ """
+ Create a DAO wallet for existing DAO
+ :param wallet_state_manager: Wallet state manager
+ :param main_wallet: Standard wallet
+ :param treasury_id: The singleton ID of the DAO treasury coin
+ :param filter_amount: Min votes to see proposal (user defined)
+ :param name: Wallet name
+ :return: DAO wallet
+ """
+ self = DAOWallet()
+ self.wallet_state_manager = wallet_state_manager
+ if name is None:
+ name = self.generate_wallet_name()
+
+ self.standard_wallet = main_wallet
+ self.log = logging.getLogger(name if name else __name__)
+ self.log.info("Creating DAO wallet for existent DAO ...")
+ self.dao_info = DAOInfo(
+ treasury_id=treasury_id,
+ cat_wallet_id=uint32(0),
+ dao_cat_wallet_id=uint32(0),
+ proposals_list=[],
+ parent_info=[],
+ current_treasury_coin=None,
+ current_treasury_innerpuz=None,
+ singleton_block_height=uint32(0),
+ filter_below_vote_amount=filter_amount,
+ assets=[],
+ current_height=uint64(0),
+ )
+ info_as_string = json.dumps(self.dao_info.to_json_dict())
+ self.wallet_info = await wallet_state_manager.user_store.create_wallet(
+ name, WalletType.DAO.value, info_as_string
+ )
+ await self.wallet_state_manager.add_new_wallet(self)
+ await self.resync_treasury_state()
+ await self.save_info(self.dao_info)
+ self.wallet_id = self.wallet_info.id
+
+ # Now that the dao wallet is created, we can create the dao_cat wallet
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ cat_tail = cat_wallet.cat_info.limitations_program_hash
+ new_dao_cat_wallet = await DAOCATWallet.get_or_create_wallet_for_cat(
+ self.wallet_state_manager, self.standard_wallet, cat_tail.hex()
+ )
+ dao_cat_wallet_id = new_dao_cat_wallet.wallet_info.id
+ dao_info = dataclasses.replace(
+ self.dao_info, cat_wallet_id=cat_wallet.id(), dao_cat_wallet_id=dao_cat_wallet_id
+ )
+ await self.save_info(dao_info)
+
+ # add treasury id to interested puzzle hashes. This is hinted in funding coins so we can track them
+ funding_inner_hash = get_p2_singleton_puzhash(self.dao_info.treasury_id)
+ await self.wallet_state_manager.add_interested_puzzle_hashes(
+ [self.dao_info.treasury_id, funding_inner_hash], [self.id(), self.id()]
+ )
+ return self
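+ # Companion sketch for joining an existing DAO (existing_treasury_id is a placeholder for
+ # the launcher/singleton ID of the treasury being tracked):
+ #
+ #   dao_wallet = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ #       wsm, main_wallet, existing_treasury_id, filter_amount=uint64(1)
+ #   )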
+
+ @staticmethod
+ async def create(
+ wallet_state_manager: Any,
+ wallet: Wallet,
+ wallet_info: WalletInfo,
+ name: Optional[str] = None,
+ ) -> DAOWallet:
+ """
+ Create a DAO wallet based on the local database
+ :param wallet_state_manager: Wallet state manager
+ :param wallet: Standard wallet
+ :param wallet_info: Serialized WalletInfo
+ :param name: Wallet name
+ :return: DAO wallet
+ """
+ self = DAOWallet()
+ self.log = logging.getLogger(name if name else __name__)
+ self.wallet_state_manager = wallet_state_manager
+ self.wallet_info = wallet_info
+ self.wallet_id = wallet_info.id
+ self.standard_wallet = wallet
+ self.dao_info = DAOInfo.from_json_dict(json.loads(wallet_info.data))
+ self.dao_rules = get_treasury_rules_from_puzzle(self.dao_info.current_treasury_innerpuz)
+ return self
+
+ @classmethod
+ def type(cls) -> WalletType:
+ return WalletType.DAO
+
+ def id(self) -> uint32:
+ return self.wallet_info.id
+
+ async def set_name(self, new_name: str) -> None:
+ new_info = dataclasses.replace(self.wallet_info, name=new_name)
+ self.wallet_info = new_info
+ await self.wallet_state_manager.user_store.update_wallet(self.wallet_info)
+
+ def get_name(self) -> str:
+ return self.wallet_info.name
+
+ async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool:
+ raise NotImplementedError("Method not implemented for DAO Wallet") # pragma: no cover
+
+ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
+ raise NotImplementedError("puzzle_hash_for_pk is not available in DAO wallets") # pragma: no cover
+
+ async def get_new_p2_inner_hash(self) -> bytes32:
+ puzzle = await self.get_new_p2_inner_puzzle()
+ return puzzle.get_tree_hash()
+
+ async def get_new_p2_inner_puzzle(self) -> Program:
+ return await self.standard_wallet.get_new_puzzle()
+
+ def get_parent_for_coin(self, coin: Coin) -> Optional[LineageProof]:
+ parent_info = None
+ for name, ccparent in self.dao_info.parent_info:
+ if name == coin.parent_coin_info:
+ parent_info = ccparent
+ return parent_info
+
+ async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ return uint128(0) # pragma: no cover
+
+ async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ # No spendable or receivable value
+ return uint128(1)
+
+ async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ # No spendable or receivable value
+ return uint128(1)
+
+ async def select_coins(
+ self,
+ amount: uint64,
+ coin_selection_config: CoinSelectionConfig,
+ ) -> Set[Coin]:
+ """
+ Returns a set of coins that can be used for generating a new transaction.
+ Note: Must be called under wallet state manager lock
+ There is no need for max/min coin amount or excluded amount because the dao treasury should
+ always be a single coin with amount 1
+ """
+ spendable_amount: uint128 = await self.get_spendable_balance()
+ if amount > spendable_amount:
+ self.log.warning(f"Can't select {amount}, from spendable {spendable_amount} for wallet id {self.id()}")
+ return set()
+
+ spendable_coins: List[WalletCoinRecord] = list(
+ await self.wallet_state_manager.get_spendable_coins_for_wallet(self.wallet_info.id)
+ )
+
+ # Try to use coins from the store, if there isn't enough of "unused"
+ # coins use change coins that are not confirmed yet
+ unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
+ self.wallet_info.id
+ )
+ coins = await select_coins(
+ spendable_amount,
+ coin_selection_config,
+ spendable_coins,
+ unconfirmed_removals,
+ self.log,
+ uint128(amount),
+ )
+ assert sum(c.amount for c in coins) >= amount
+ return coins
+
+ async def get_pending_change_balance(self) -> uint64:
+ # No spendable or receivable value
+ return uint64(0)
+
+ async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
+ # No spendable or receivable value
+ return uint128(1)
+
+ # if asset_id is None, we get normal XCH
+ async def get_balance_by_asset_type(self, asset_id: Optional[bytes32] = None) -> uint128:
+ puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=asset_id)
+ records = await self.wallet_state_manager.coin_store.get_coin_records_by_puzzle_hash(puzhash)
+ return uint128(sum([cr.coin.amount for cr in records if not cr.spent]))
+
+ # if asset_id is None, we get normal XCH
+ async def select_coins_for_asset_type(self, amount: uint64, asset_id: Optional[bytes32] = None) -> List[Coin]:
+ puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=asset_id)
+ records = await self.wallet_state_manager.coin_store.get_coin_records_by_puzzle_hash(puzhash)
+ # TODO: smarter coin selection algorithm
+ total = 0
+ coins = []
+ for record in records:
+ if not record.spent:
+ total += record.coin.amount
+ coins.append(record.coin)
+ if total >= amount:
+ break
+ if total < amount: # pragma: no cover
+ raise ValueError(f"Not enough of asset {asset_id}: {total} < {amount}")
+ return coins
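+ # Illustrative sketch of the two asset-type helpers above (some_tail_hash is a placeholder
+ # CAT tail hash; passing asset_id=None works on the treasury's plain XCH funds):
+ #
+ #   xch_balance = await self.get_balance_by_asset_type()
+ #   cat_coins = await self.select_coins_for_asset_type(uint64(500), asset_id=some_tail_hash)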
+
+ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection, coin_data: Optional[Any]) -> None:
+ """
+ Notification from wallet state manager that a coin has been received.
+ This can be either a treasury coin update or funds added to the treasury
+ """
+ self.log.info(f"DAOWallet.coin_added() called with the coin: {coin.name().hex()}:{coin}.")
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ try:
+ # Get the parent coin spend
+ cs = (await wallet_node.get_coin_state([coin.parent_coin_info], peer, height))[0]
+ parent_spend = await fetch_coin_spend(cs.spent_height, cs.coin, peer)
+
+ # check if it's a singleton and add to singleton_store
+ singleton_id = get_singleton_id_from_puzzle(parent_spend.puzzle_reveal)
+
+ if singleton_id:
+ await self.wallet_state_manager.singleton_store.add_spend(self.id(), parent_spend, height)
+ puzzle = Program.from_bytes(bytes(parent_spend.puzzle_reveal))
+ solution = Program.from_bytes(bytes(parent_spend.solution))
+ uncurried = uncurry_puzzle(puzzle)
+ matched_funding_puz = match_funding_puzzle(uncurried, solution, coin, [self.dao_info.treasury_id])
+ if matched_funding_puz:
+ # funding coin
+ xch_funds_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=None)
+ if coin.puzzle_hash == xch_funds_puzhash:
+ asset_id = None
+ else:
+ asset_id = get_asset_id_from_puzzle(parent_spend.puzzle_reveal.to_program())
+ if asset_id not in self.dao_info.assets:
+ new_asset_list = self.dao_info.assets.copy()
+ new_asset_list.append(asset_id)
+ dao_info = dataclasses.replace(self.dao_info, assets=new_asset_list)
+ await self.save_info(dao_info)
+ await self.wallet_state_manager.add_interested_puzzle_hashes([coin.puzzle_hash], [self.id()])
+ self.log.info(f"DAO funding coin added: {coin.name().hex()}:{coin}. Asset ID: {asset_id}")
+ except Exception as e: # pragma: no cover
+ self.log.exception(f"Error occurred during dao wallet coin addition: {e}")
+ return
+
+ def get_cat_tail_hash(self) -> bytes32:
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ return cat_wallet.cat_info.limitations_program_hash
+
+ async def adjust_filter_level(self, new_filter_level: uint64) -> None:
+ dao_info = dataclasses.replace(self.dao_info, filter_below_vote_amount=new_filter_level)
+ await self.save_info(dao_info)
+
+ async def clear_finished_proposals_from_memory(self) -> None:
+ dao_cat_wallet: DAOCATWallet = self.wallet_state_manager.wallets[self.dao_info.dao_cat_wallet_id]
+ new_list = [
+ prop_info
+ for prop_info in self.dao_info.proposals_list
+ if not prop_info.closed
+ or prop_info.closed is None
+ or any(prop_info.proposal_id in lci.active_votes for lci in dao_cat_wallet.dao_cat_info.locked_coins)
+ ]
+ dao_info = dataclasses.replace(self.dao_info, proposals_list=new_list)
+ await self.save_info(dao_info)
+ return
+
+ async def resync_treasury_state(self) -> None:
+ """
+ This is called during create_new_dao_wallet_for_existing_dao.
+ When we want to sync to an existing DAO, we provide the treasury coin's singleton ID, and then trace all
+ the child coins until we reach the current DAO treasury coin. We use the puzzle reveal and solution to
+ get the current state of the DAO, and to work out what the tail of the DAO CAT token is.
+ This also captures all the proposals that have been created and their state.
+ """
+ parent_coin_id: bytes32 = self.dao_info.treasury_id
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+
+ parent_coin = None
+ parent_parent_coin = None
+ while True:
+ children = await wallet_node.fetch_children(parent_coin_id, peer)
+ if len(children) == 0:
+ break
+
+ children_state_list: List[CoinState] = [child for child in children if child.coin.amount % 2 == 1]
+ if len(children_state_list) == 0: # pragma: no cover
+ raise RuntimeError("Could not retrieve child_state")
+ children_state = children_state_list[0]
+ assert children_state is not None
+ child_coin = children_state.coin
+ if parent_coin is not None:
+ parent_parent_coin = parent_coin
+ parent_coin = child_coin
+ parent_coin_id = child_coin.name()
+
+ if parent_parent_coin is None: # pragma: no cover
+ raise RuntimeError("could not get parent_parent_coin of %s", children)
+
+ # get lineage proof of parent spend, and also current innerpuz
+ assert children_state.created_height
+ parent_spend = await fetch_coin_spend(children_state.created_height, parent_parent_coin, peer)
+ assert parent_spend is not None
+ parent_inner_puz = chia.wallet.singleton.get_inner_puzzle_from_singleton(
+ parent_spend.puzzle_reveal.to_program()
+ )
+ if parent_inner_puz is None: # pragma: no cover
+ raise ValueError("get_innerpuzzle_from_puzzle failed")
+
+ if parent_spend.puzzle_reveal.get_tree_hash() == child_coin.puzzle_hash:
+ current_inner_puz = parent_inner_puz
+ else: # pragma: no cover
+ # extract the treasury solution from the full singleton solution
+ inner_solution = parent_spend.solution.to_program().rest().rest().first()
+ # reconstruct the treasury puzzle
+ current_inner_puz = get_new_puzzle_from_treasury_solution(parent_inner_puz, inner_solution)
+ # set the treasury rules
+ self.dao_rules = get_treasury_rules_from_puzzle(current_inner_puz)
+
+ current_lineage_proof = LineageProof(
+ parent_parent_coin.parent_coin_info, parent_inner_puz.get_tree_hash(), parent_parent_coin.amount
+ )
+ await self.add_parent(parent_parent_coin.name(), current_lineage_proof)
+
+ # Hack to find the cat tail hash from the memo of the genesis spend
+ launcher_state = await wallet_node.get_coin_state([self.dao_info.treasury_id], peer)
+ genesis_coin_id = launcher_state[0].coin.parent_coin_info
+ genesis_state = await wallet_node.get_coin_state([genesis_coin_id], peer)
+ genesis_spend = await fetch_coin_spend(genesis_state[0].spent_height, genesis_state[0].coin, peer)
+ cat_tail_hash = None
+ conds = genesis_spend.solution.to_program().at("rfr").as_iter()
+ for cond in conds:
+ if (cond.first().as_atom() == ConditionOpcode.CREATE_COIN) and (
+ int_from_bytes(cond.at("rrf").as_atom()) == 1
+ ):
+ cat_tail_hash = bytes32(cond.at("rrrff").as_atom())
+ break
+ assert cat_tail_hash
+
+ cat_wallet: Optional[CATWallet] = None
+
+ # Get or create a cat wallet
+ for wallet_id in self.wallet_state_manager.wallets:
+ wallet = self.wallet_state_manager.wallets[wallet_id]
+ if wallet.type() == WalletType.CAT: # pragma: no cover
+ assert isinstance(wallet, CATWallet)
+ if wallet.cat_info.limitations_program_hash == cat_tail_hash:
+ cat_wallet = wallet
+ break
+ else:
+ # Didn't find a cat wallet, so create one
+ cat_wallet = await CATWallet.get_or_create_wallet_for_cat(
+ self.wallet_state_manager, self.standard_wallet, cat_tail_hash.hex()
+ )
+
+ assert cat_wallet is not None
+ cat_wallet_id = cat_wallet.wallet_info.id
+ dao_info = dataclasses.replace(
+ self.dao_info,
+ cat_wallet_id=uint32(cat_wallet_id),
+ dao_cat_wallet_id=uint32(0),
+ current_treasury_coin=child_coin,
+ current_treasury_innerpuz=current_inner_puz,
+ )
+ await self.save_info(dao_info)
+
+ future_parent = LineageProof(
+ child_coin.parent_coin_info,
+ dao_info.current_treasury_innerpuz.get_tree_hash(),
+ uint64(child_coin.amount),
+ )
+ await self.add_parent(child_coin.name(), future_parent)
+ assert self.dao_info.parent_info is not None
+
+ # get existing xch funds for treasury
+ xch_funds_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=None)
+ await self.wallet_state_manager.add_interested_puzzle_hashes([xch_funds_puzhash], [self.id()])
+ await self.wallet_state_manager.add_interested_puzzle_hashes([self.dao_info.treasury_id], [self.id()])
+ await self.wallet_state_manager.add_interested_puzzle_hashes(
+ [self.dao_info.current_treasury_coin.puzzle_hash], [self.id()]
+ )
+
+ # Resync the wallet from when the treasury was created to get the existing funds
+ # TODO: Maybe split this out as an option for users since it may be slow?
+ if not wallet_node.is_trusted(peer):
+ # Untrusted nodes won't automatically send us the history of all the treasury and proposal coins,
+ # so we have to request them via sync_from_untrusted_close_to_peak
+ request = RequestBlockHeader(children_state.created_height)
+ response: Optional[RespondBlockHeader] = await peer.call_api(FullNodeAPI.request_block_header, request)
+ await wallet_node.sync_from_untrusted_close_to_peak(response.header_block, peer)
+
+ return
+
+ async def generate_new_dao(
+ self,
+ amount_of_cats_to_create: Optional[uint64],
+ tx_config: TXConfig,
+ cat_tail_hash: Optional[bytes32] = None,
+ fee: uint64 = uint64(0),
+ fee_for_cat: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> Optional[SpendBundle]:
+ """
+ Create a new DAO treasury using the dao_rules object. This does the first spend to create the launcher
+ and eve coins.
+ The eve spend has to be completed in a separate tx using 'submit_eve_spend' once the number of blocks required
+ by dao_rules.oracle_spend_delay has passed.
+ This must be called under the wallet state manager lock
+ """
+
+ if amount_of_cats_to_create is not None and amount_of_cats_to_create < 0: # pragma: no cover
+ raise ValueError("amount_of_cats must be >= 0, or None")
+ if (
+ amount_of_cats_to_create is None or amount_of_cats_to_create == 0
+ ) and cat_tail_hash is None: # pragma: no cover
+ raise ValueError("amount_of_cats must be > 0 or cat_tail_hash must be specified")
+ if (
+ amount_of_cats_to_create is not None and amount_of_cats_to_create > 0 and cat_tail_hash is not None
+ ): # pragma: no cover
+ raise ValueError("cannot create voting cats and use existing cat_tail_hash")
+ if self.dao_rules.pass_percentage > 10000 or self.dao_rules.pass_percentage < 0: # pragma: no cover
+ raise ValueError("proposal pass percentage must be between 0 and 10000")
+
+ if amount_of_cats_to_create is not None and amount_of_cats_to_create > 0:
+ coins = await self.standard_wallet.select_coins(
+ uint64(amount_of_cats_to_create + fee + 1),
+ tx_config.coin_selection_config,
+ )
+ else: # pragma: no cover
+ coins = await self.standard_wallet.select_coins(uint64(fee + 1), tx_config.coin_selection_config)
+
+ if coins is None: # pragma: no cover
+ return None
+ # origin is a normal coin which creates the launcher coin
+ origin = coins.copy().pop()
+
+ genesis_launcher_puz = SINGLETON_LAUNCHER
+ # launcher coin contains singleton launcher, launcher coin ID == singleton_id == treasury_id
+ launcher_coin = Coin(origin.name(), genesis_launcher_puz.get_tree_hash(), 1)
+
+ if cat_tail_hash is None:
+ assert amount_of_cats_to_create is not None
+ different_coins = await self.standard_wallet.select_coins(
+ uint64(amount_of_cats_to_create + fee_for_cat),
+ coin_selection_config=tx_config.coin_selection_config.override(
+ excluded_coin_ids=[*tx_config.coin_selection_config.excluded_coin_ids, origin.name()]
+ ),
+ )
+ cat_origin = different_coins.copy().pop()
+ assert origin.name() != cat_origin.name()
+ cat_tail = generate_cat_tail(cat_origin.name(), launcher_coin.name())
+ cat_tail_hash = cat_tail.get_tree_hash()
+
+ assert cat_tail_hash is not None
+
+ dao_info: DAOInfo = DAOInfo(
+ launcher_coin.name(),
+ self.dao_info.cat_wallet_id,
+ self.dao_info.dao_cat_wallet_id,
+ self.dao_info.proposals_list,
+ self.dao_info.parent_info,
+ None,
+ None,
+ uint32(0),
+ self.dao_info.filter_below_vote_amount,
+ self.dao_info.assets,
+ self.dao_info.current_height,
+ )
+ await self.save_info(dao_info)
+ new_cat_wallet = None
+ # This will also mint the coins
+ if amount_of_cats_to_create is not None and different_coins is not None:
+ cat_tail_info = {
+ "identifier": "genesis_by_id_or_singleton",
+ "treasury_id": launcher_coin.name(),
+ "coins": different_coins,
+ }
+ new_cat_wallet = await CATWallet.create_new_cat_wallet(
+ self.wallet_state_manager,
+ self.standard_wallet,
+ cat_tail_info,
+ amount_of_cats_to_create,
+ DEFAULT_TX_CONFIG,
+ fee=fee_for_cat,
+ )
+ assert new_cat_wallet is not None
+ else: # pragma: no cover
+ for wallet in self.wallet_state_manager.wallets:
+ if self.wallet_state_manager.wallets[wallet].type() == WalletType.CAT:
+ if self.wallet_state_manager.wallets[wallet].cat_info.limitations_program_hash == cat_tail_hash:
+ new_cat_wallet = self.wallet_state_manager.wallets[wallet]
+
+ assert new_cat_wallet is not None
+ cat_wallet_id = new_cat_wallet.wallet_info.id
+
+ assert cat_tail_hash == new_cat_wallet.cat_info.limitations_program_hash
+ await new_cat_wallet.set_tail_program(bytes(cat_tail).hex())
+ dao_info = DAOInfo(
+ self.dao_info.treasury_id,
+ cat_wallet_id,
+ self.dao_info.dao_cat_wallet_id,
+ self.dao_info.proposals_list,
+ self.dao_info.parent_info,
+ None,
+ None,
+ uint32(0),
+ self.dao_info.filter_below_vote_amount,
+ self.dao_info.assets,
+ self.dao_info.current_height,
+ )
+
+ await self.save_info(dao_info)
+
+ dao_treasury_puzzle = get_treasury_puzzle(self.dao_rules, launcher_coin.name(), cat_tail_hash)
+ full_treasury_puzzle = curry_singleton(launcher_coin.name(), dao_treasury_puzzle)
+ full_treasury_puzzle_hash = full_treasury_puzzle.get_tree_hash()
+
+ announcement_set: Set[Announcement] = set()
+ announcement_message = Program.to([full_treasury_puzzle_hash, 1, bytes(0x80)]).get_tree_hash()
+ announcement_set.add(Announcement(launcher_coin.name(), announcement_message))
+ tx_records: List[TransactionRecord] = await self.standard_wallet.generate_signed_transaction(
+ uint64(1),
+ genesis_launcher_puz.get_tree_hash(),
+ tx_config,
+ fee,
+ origin_id=origin.name(),
+ coins=set(coins),
+ coin_announcements_to_consume=announcement_set,
+ memos=[new_cat_wallet.cat_info.limitations_program_hash],
+ )
+ tx_record: TransactionRecord = tx_records[0]
+
+ genesis_launcher_solution = Program.to([full_treasury_puzzle_hash, 1, bytes(0x80)])
+
+ launcher_cs = CoinSpend(launcher_coin, genesis_launcher_puz, genesis_launcher_solution)
+ launcher_sb = SpendBundle([launcher_cs], AugSchemeMPL.aggregate([]))
+
+ launcher_proof = LineageProof(
+ bytes32(launcher_coin.parent_coin_info),
+ None,
+ uint64(launcher_coin.amount),
+ )
+ await self.add_parent(launcher_coin.name(), launcher_proof)
+
+ if tx_record is None or tx_record.spend_bundle is None: # pragma: no cover
+ return None
+
+ eve_coin = Coin(launcher_coin.name(), full_treasury_puzzle_hash, uint64(1))
+ dao_info = DAOInfo(
+ launcher_coin.name(),
+ cat_wallet_id,
+ self.dao_info.dao_cat_wallet_id,
+ self.dao_info.proposals_list,
+ self.dao_info.parent_info,
+ eve_coin,
+ dao_treasury_puzzle,
+ self.dao_info.singleton_block_height,
+ self.dao_info.filter_below_vote_amount,
+ self.dao_info.assets,
+ self.dao_info.current_height,
+ )
+ await self.save_info(dao_info)
+ eve_spend = await self.generate_treasury_eve_spend(dao_treasury_puzzle, eve_coin)
+
+ full_spend = SpendBundle.aggregate([tx_record.spend_bundle, launcher_sb, eve_spend])
+
+ treasury_record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=dao_treasury_puzzle.get_tree_hash(),
+ amount=uint64(1),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=full_spend,
+ additions=full_spend.additions(),
+ removals=full_spend.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+ regular_record = dataclasses.replace(tx_record, spend_bundle=None)
+ await self.wallet_state_manager.add_pending_transaction(regular_record)
+ await self.wallet_state_manager.add_pending_transaction(treasury_record)
+
+ funding_inner_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id)
+ await self.wallet_state_manager.add_interested_puzzle_hashes([funding_inner_puzhash], [self.id()])
+ await self.wallet_state_manager.add_interested_puzzle_hashes([launcher_coin.name()], [self.id()])
+ await self.wallet_state_manager.add_interested_coin_ids([launcher_coin.name()], [self.wallet_id])
+
+ await self.wallet_state_manager.add_interested_coin_ids([eve_coin.name()], [self.wallet_id])
+ return full_spend
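+ # Note on the spend above (descriptive only): the returned bundle aggregates the standard
+ # wallet transaction that pays into the launcher, the launcher coin spend itself, and the
+ # treasury eve spend generated by generate_treasury_eve_spend.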
+
+ async def generate_treasury_eve_spend(
+ self, inner_puz: Program, eve_coin: Coin, fee: uint64 = uint64(0)
+ ) -> SpendBundle:
+ """
+ Create the eve spend of the treasury
+ This can only be completed after a number of blocks > oracle_spend_delay have been farmed
+ """
+ if self.dao_info.current_treasury_innerpuz is None: # pragma: no cover
+ raise ValueError("generate_treasury_eve_spend called with nil self.dao_info.current_treasury_innerpuz")
+ full_treasury_puzzle = curry_singleton(self.dao_info.treasury_id, inner_puz)
+ launcher_id, launcher_proof = self.dao_info.parent_info[0]
+ assert launcher_proof
+ assert inner_puz
+ inner_sol = Program.to([0, 0, 0, 0, get_singleton_struct_for_id(launcher_id)])
+ fullsol = Program.to(
+ [
+ launcher_proof.to_program(),
+ eve_coin.amount,
+ inner_sol,
+ ]
+ )
+ eve_coin_spend = CoinSpend(eve_coin, full_treasury_puzzle, fullsol)
+ eve_spend_bundle = SpendBundle([eve_coin_spend], G2Element())
+
+ next_proof = LineageProof(
+ eve_coin.parent_coin_info,
+ inner_puz.get_tree_hash(),
+ uint64(eve_coin.amount),
+ )
+ next_coin = Coin(eve_coin.name(), eve_coin.puzzle_hash, eve_coin.amount)
+ await self.add_parent(eve_coin.name(), next_proof)
+ await self.wallet_state_manager.add_interested_coin_ids([next_coin.name()], [self.wallet_id])
+
+ dao_info = dataclasses.replace(self.dao_info, current_treasury_coin=next_coin)
+ await self.save_info(dao_info)
+ await self.wallet_state_manager.singleton_store.add_spend(self.id(), eve_coin_spend)
+ return eve_spend_bundle
+
+ async def generate_new_proposal(
+ self,
+ proposed_puzzle: Program,
+ tx_config: TXConfig,
+ vote_amount: Optional[uint64] = None,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ dao_rules = get_treasury_rules_from_puzzle(self.dao_info.current_treasury_innerpuz)
+ coins = await self.standard_wallet.select_coins(
+ uint64(fee + dao_rules.proposal_minimum_amount),
+ tx_config.coin_selection_config,
+ )
+ if coins is None: # pragma: no cover
+ return None
+ # origin is a normal coin which creates the launcher coin
+ origin = coins.copy().pop()
+ genesis_launcher_puz = SINGLETON_LAUNCHER
+ # launcher coin contains singleton launcher, launcher coin ID == singleton_id == proposal_id
+ launcher_coin = Coin(origin.name(), genesis_launcher_puz.get_tree_hash(), dao_rules.proposal_minimum_amount)
+
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+
+ if vote_amount is None: # pragma: no cover
+ dao_cat_wallet = self.wallet_state_manager.get_wallet(
+ id=self.dao_info.dao_cat_wallet_id, required_type=DAOCATWallet
+ )
+ vote_amount = await dao_cat_wallet.get_votable_balance(include_free_cats=False)
+ assert vote_amount is not None
+ cat_tail_hash = cat_wallet.cat_info.limitations_program_hash
+ assert cat_tail_hash
+ dao_proposal_puzzle = get_proposal_puzzle(
+ proposal_id=launcher_coin.name(),
+ cat_tail_hash=cat_tail_hash,
+ treasury_id=self.dao_info.treasury_id,
+ votes_sum=uint64(0),
+ total_votes=uint64(0),
+ proposed_puzzle_hash=proposed_puzzle.get_tree_hash(),
+ )
+
+ full_proposal_puzzle = curry_singleton(launcher_coin.name(), dao_proposal_puzzle)
+ full_proposal_puzzle_hash = full_proposal_puzzle.get_tree_hash()
+
+ announcement_set: Set[Announcement] = set()
+ announcement_message = Program.to(
+ [full_proposal_puzzle_hash, dao_rules.proposal_minimum_amount, bytes(0x80)]
+ ).get_tree_hash()
+ announcement_set.add(Announcement(launcher_coin.name(), announcement_message))
+
+ tx_records: List[TransactionRecord] = await self.standard_wallet.generate_signed_transaction(
+ uint64(dao_rules.proposal_minimum_amount),
+ genesis_launcher_puz.get_tree_hash(),
+ tx_config,
+ fee,
+ origin_id=origin.name(),
+ coins=coins,
+ coin_announcements_to_consume=announcement_set,
+ )
+ tx_record: TransactionRecord = tx_records[0]
+
+ genesis_launcher_solution = Program.to(
+ [full_proposal_puzzle_hash, dao_rules.proposal_minimum_amount, bytes(0x80)]
+ )
+
+ launcher_cs = CoinSpend(launcher_coin, genesis_launcher_puz, genesis_launcher_solution)
+ launcher_sb = SpendBundle([launcher_cs], AugSchemeMPL.aggregate([]))
+ eve_coin = Coin(launcher_coin.name(), full_proposal_puzzle_hash, dao_rules.proposal_minimum_amount)
+
+ future_parent = LineageProof(
+ eve_coin.parent_coin_info,
+ dao_proposal_puzzle.get_tree_hash(),
+ uint64(eve_coin.amount),
+ )
+ eve_parent = LineageProof(
+ bytes32(launcher_coin.parent_coin_info),
+ bytes32(launcher_coin.puzzle_hash),
+ uint64(launcher_coin.amount),
+ )
+
+ await self.add_parent(bytes32(eve_coin.parent_coin_info), eve_parent)
+ await self.add_parent(eve_coin.name(), future_parent)
+
+ eve_spend = await self.generate_proposal_eve_spend(
+ eve_coin=eve_coin,
+ full_proposal_puzzle=full_proposal_puzzle,
+ dao_proposal_puzzle=dao_proposal_puzzle,
+ proposed_puzzle_reveal=proposed_puzzle,
+ launcher_coin=launcher_coin,
+ vote_amount=vote_amount,
+ )
+ assert tx_record
+ assert tx_record.spend_bundle is not None
+
+ full_spend = SpendBundle.aggregate([tx_record.spend_bundle, eve_spend, launcher_sb])
+
+ record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=full_proposal_puzzle.get_tree_hash(),
+ amount=uint64(dao_rules.proposal_minimum_amount),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=full_spend,
+ additions=full_spend.additions(),
+ removals=full_spend.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+ return record
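+ # Sketch tying this to dao_utils.generate_simple_proposal_innerpuz (all identifiers are
+ # placeholders): callers typically build the proposed puzzle first, then wrap it in a new
+ # proposal singleton here.
+ #
+ #   proposed = generate_simple_proposal_innerpuz(treasury_id, [recipient_ph], [uint64(100)], [None])
+ #   tx = await dao_wallet.generate_new_proposal(proposed, DEFAULT_TX_CONFIG, vote_amount=uint64(10))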
+
+ async def generate_proposal_eve_spend(
+ self,
+ *,
+ eve_coin: Coin,
+ full_proposal_puzzle: Program,
+ dao_proposal_puzzle: Program,
+ proposed_puzzle_reveal: Program,
+ launcher_coin: Coin,
+ vote_amount: uint64,
+ ) -> SpendBundle:
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ cat_tail = cat_wallet.cat_info.limitations_program_hash
+ dao_cat_wallet = await DAOCATWallet.get_or_create_wallet_for_cat(
+ self.wallet_state_manager, self.standard_wallet, cat_tail.hex()
+ )
+ assert dao_cat_wallet is not None
+
+ dao_cat_spend = await dao_cat_wallet.create_vote_spend(
+ vote_amount, launcher_coin.name(), True, proposal_puzzle=dao_proposal_puzzle
+ )
+ vote_amounts = []
+ vote_coins = []
+ previous_votes = []
+ lockup_inner_puzhashes = []
+ for spend in dao_cat_spend.coin_spends:
+ spend_vote_amount = Program.from_bytes(bytes(spend.solution)).at("frrrrrrf").as_int()
+ vote_amounts.append(spend_vote_amount)
+ vote_coins.append(spend.coin.name())
+ previous_votes.append(
+ get_active_votes_from_lockup_puzzle(
+ get_innerpuzzle_from_cat_puzzle(Program.from_bytes(bytes(spend.puzzle_reveal)))
+ )
+ )
+ lockup_inner_puz = get_innerpuz_from_lockup_puzzle(
+ get_innerpuzzle_from_cat_puzzle(Program.from_bytes(bytes(spend.puzzle_reveal)))
+ )
+ assert isinstance(lockup_inner_puz, Program)
+ lockup_inner_puzhashes.append(lockup_inner_puz.get_tree_hash())
+ inner_sol = Program.to(
+ [
+ vote_amounts,
+ 1,
+ vote_coins,
+ previous_votes,
+ lockup_inner_puzhashes,
+ proposed_puzzle_reveal,
+ 0,
+ 0,
+ 0,
+ 0,
+ eve_coin.amount,
+ ]
+ )
+ # full solution is (lineage_proof my_amount inner_solution)
+ fullsol = Program.to(
+ [
+ [launcher_coin.parent_coin_info, launcher_coin.amount],
+ eve_coin.amount,
+ inner_sol,
+ ]
+ )
+ list_of_coinspends = [CoinSpend(eve_coin, full_proposal_puzzle, fullsol)]
+ unsigned_spend_bundle = SpendBundle(list_of_coinspends, G2Element())
+ return unsigned_spend_bundle.aggregate([unsigned_spend_bundle, dao_cat_spend])
+
+ async def generate_proposal_vote_spend(
+ self,
+ proposal_id: bytes32,
+ vote_amount: Optional[uint64],
+ is_yes_vote: bool,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ self.log.info(f"Trying to create a proposal close spend with ID: {proposal_id}")
+ proposal_info = None
+ for pi in self.dao_info.proposals_list:
+ if pi.proposal_id == proposal_id:
+ proposal_info = pi
+ break
+ if proposal_info is None: # pragma: no cover
+ raise ValueError("Unable to find a proposal with that ID.")
+ if (proposal_info.timer_coin is None) and (
+ proposal_info.current_innerpuz == get_finished_state_puzzle(proposal_info.proposal_id)
+ ):
+ raise ValueError("This proposal is already closed. Feel free to unlock your coins.") # pragma: no cover
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ cat_tail = cat_wallet.cat_info.limitations_program_hash
+ dao_cat_wallet = await DAOCATWallet.get_or_create_wallet_for_cat(
+ self.wallet_state_manager, self.standard_wallet, cat_tail.hex()
+ )
+ assert dao_cat_wallet is not None
+ assert proposal_info.current_innerpuz is not None
+
+ if vote_amount is None: # pragma: no cover
+ vote_amount = await dao_cat_wallet.get_votable_balance(proposal_id)
+ assert vote_amount is not None
+ dao_cat_spend = await dao_cat_wallet.create_vote_spend(
+ vote_amount, proposal_id, is_yes_vote, proposal_puzzle=proposal_info.current_innerpuz
+ )
+ vote_amounts = []
+ vote_coins = []
+ previous_votes = []
+ lockup_inner_puzhashes = []
+ assert dao_cat_spend is not None
+ for spend in dao_cat_spend.coin_spends:
+ vote_amounts.append(
+ Program.from_bytes(bytes(spend.solution)).at("frrrrrrf")
+ ) # this is the vote_amount field of the solution
+ vote_coins.append(spend.coin.name())
+ previous_votes.append(
+ get_active_votes_from_lockup_puzzle(
+ get_innerpuzzle_from_cat_puzzle(Program.from_bytes(bytes(spend.puzzle_reveal)))
+ )
+ )
+ lockup_inner_puz = get_innerpuz_from_lockup_puzzle(
+ get_innerpuzzle_from_cat_puzzle(Program.from_bytes(bytes(spend.puzzle_reveal)))
+ )
+ assert isinstance(lockup_inner_puz, Program)
+ lockup_inner_puzhashes.append(lockup_inner_puz.get_tree_hash())
+ inner_sol = Program.to(
+ [
+ vote_amounts,
+ 1 if is_yes_vote else 0,
+ vote_coins,
+ previous_votes,
+ lockup_inner_puzhashes,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ proposal_info.current_coin.amount,
+ ]
+ )
+ parent_info = self.get_parent_for_coin(proposal_info.current_coin)
+ assert parent_info is not None
+ # full solution is (lineage_proof my_amount inner_solution)
+ fullsol = Program.to(
+ [
+ [
+ parent_info.parent_name,
+ parent_info.inner_puzzle_hash,
+ parent_info.amount,
+ ],
+ proposal_info.current_coin.amount,
+ inner_sol,
+ ]
+ )
+ full_proposal_puzzle = curry_singleton(proposal_id, proposal_info.current_innerpuz)
+ list_of_coinspends = [CoinSpend(proposal_info.current_coin, full_proposal_puzzle, fullsol)]
+ unsigned_spend_bundle = SpendBundle(list_of_coinspends, G2Element())
+ if fee > 0:
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(
+ fee,
+ tx_config,
+ )
+ assert chia_tx.spend_bundle is not None
+ spend_bundle = unsigned_spend_bundle.aggregate([unsigned_spend_bundle, dao_cat_spend, chia_tx.spend_bundle])
+ else:
+ spend_bundle = unsigned_spend_bundle.aggregate([unsigned_spend_bundle, dao_cat_spend])
+
+ record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=full_proposal_puzzle.get_tree_hash(),
+ amount=uint64(1),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=spend_bundle,
+ additions=spend_bundle.additions(),
+ removals=spend_bundle.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+ return record
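+ # Voting sketch (proposal_id is the launcher ID of an existing proposal; the amount is a
+ # placeholder): a yes vote of 1000 locked DAO CATs.
+ #
+ #   vote_tx = await dao_wallet.generate_proposal_vote_spend(
+ #       proposal_id, uint64(1000), True, DEFAULT_TX_CONFIG
+ #   )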
+
+ async def create_proposal_close_spend(
+ self,
+ proposal_id: bytes32,
+ tx_config: TXConfig,
+ genesis_id: Optional[bytes32] = None,
+ fee: uint64 = uint64(0),
+ self_destruct: bool = False,
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ self.log.info(f"Trying to create a proposal close spend with ID: {proposal_id}")
+ proposal_info = None
+ for pi in self.dao_info.proposals_list:
+ if pi.proposal_id == proposal_id:
+ proposal_info = pi
+ break
+ if proposal_info is None: # pragma: no cover
+ raise ValueError("Unable to find a proposal with that ID.")
+ if proposal_info.timer_coin is None: # pragma: no cover
+ raise ValueError("This proposal is already closed. Feel free to unlock your coins.")
+ assert self.dao_info.current_treasury_innerpuz is not None
+ curried_args = uncurry_treasury(self.dao_info.current_treasury_innerpuz)
+ (
+ _DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ pass_percentage,
+ self_destruct_length,
+ oracle_spend_delay,
+ ) = curried_args
+ proposal_state = await self.get_proposal_state(proposal_id)
+ if not proposal_state["closable"]: # pragma: no cover
+ raise ValueError(f"This proposal is not ready to be closed. proposal_id: {proposal_id}")
+ if proposal_state["passed"]:
+ self.log.info(f"Closing passed proposal: {proposal_id}")
+ else:
+ self.log.info(f"Closing failed proposal: {proposal_id}")
+ assert proposal_info.current_innerpuz is not None
+ full_proposal_puzzle = curry_singleton(proposal_id, proposal_info.current_innerpuz)
+ assert proposal_info.current_coin.puzzle_hash == full_proposal_puzzle.get_tree_hash()
+ solution = Program.to(
+ [
+ proposal_validator.get_tree_hash(),
+ 0,
+ proposal_timelock,
+ pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_length,
+ oracle_spend_delay,
+ 1 if self_destruct else 0,
+ ]
+ )
+ parent_info = self.get_parent_for_coin(proposal_info.current_coin)
+ assert parent_info is not None
+ fullsol = Program.to(
+ [
+ [
+ parent_info.parent_name,
+ parent_info.inner_puzzle_hash,
+ parent_info.amount,
+ ],
+ proposal_info.current_coin.amount,
+ solution,
+ ]
+ )
+ proposal_cs = CoinSpend(proposal_info.current_coin, full_proposal_puzzle, fullsol)
+ if not self_destruct:
+ timer_puzzle = get_proposal_timer_puzzle(
+ self.get_cat_tail_hash(),
+ proposal_info.proposal_id,
+ self.dao_info.treasury_id,
+ )
+ c_a, curried_args = uncurry_proposal(proposal_info.current_innerpuz)
+ (
+ SELF_HASH,
+ PROPOSAL_ID,
+ PROPOSED_PUZ_HASH,
+ YES_VOTES,
+ TOTAL_VOTES,
+ ) = c_a.as_iter()
+
+ if TOTAL_VOTES.as_int() < attendance_required.as_int(): # pragma: no cover
+ raise ValueError("Unable to pass this proposal as it has not met the minimum vote attendance.")
+ timer_solution = Program.to(
+ [
+ YES_VOTES,
+ TOTAL_VOTES,
+ PROPOSED_PUZ_HASH,
+ proposal_timelock,
+ proposal_id,
+ proposal_info.current_coin.amount,
+ ]
+ )
+ timer_cs = CoinSpend(proposal_info.timer_coin, timer_puzzle, timer_solution)
+
+ full_treasury_puz = curry_singleton(self.dao_info.treasury_id, self.dao_info.current_treasury_innerpuz)
+ assert isinstance(self.dao_info.current_treasury_coin, Coin)
+ assert full_treasury_puz.get_tree_hash() == self.dao_info.current_treasury_coin.puzzle_hash
+
+ cat_spend_bundle = None
+ delegated_puzzle_sb = None
+ puzzle_reveal = await self.fetch_proposed_puzzle_reveal(proposal_id)
+ if proposal_state["passed"] and not self_destruct:
+ validator_solution = Program.to(
+ [
+ proposal_id,
+ TOTAL_VOTES,
+ YES_VOTES,
+ proposal_info.current_coin.parent_coin_info,
+ proposal_info.current_coin.amount,
+ ]
+ )
+
+ proposal_type, curried_args = get_proposal_args(puzzle_reveal)
+ if proposal_type == ProposalType.SPEND:
+ (
+ TREASURY_SINGLETON_STRUCT,
+ CAT_MOD_HASH,
+ CONDITIONS,
+ LIST_OF_TAILHASH_CONDITIONS,
+ P2_SINGLETON_VIA_DELEGATED_PUZZLE_PUZHASH,
+ ) = curried_args.as_iter()
+
+ sum = 0
+ coin_spends = []
+ xch_parent_amount_list = []
+ tailhash_parent_amount_list = []
+ treasury_inner_puzhash = self.dao_info.current_treasury_innerpuz.get_tree_hash()
+ p2_singleton_puzzle = get_p2_singleton_puzzle(self.dao_info.treasury_id)
+ cat_launcher = create_cat_launcher_for_singleton_id(self.dao_info.treasury_id)
+
+ # handle CAT minting
+ for cond in CONDITIONS.as_iter():
+ if cond.first().as_int() == 51:
+ if cond.rest().first().as_atom() == cat_launcher.get_tree_hash():
+ cat_wallet: CATWallet = self.wallet_state_manager.wallets[self.dao_info.cat_wallet_id]
+ cat_tail_hash = cat_wallet.cat_info.limitations_program_hash
+ mint_amount = cond.rest().rest().first().as_int()
+ new_cat_puzhash = cond.rest().rest().rest().first().first().as_atom()
+ eve_puzzle = curry_cat_eve(new_cat_puzhash)
+ if genesis_id is None:
+ tail_reconstruction = cat_wallet.cat_info.my_tail
+ else: # pragma: no cover
+ tail_reconstruction = generate_cat_tail(genesis_id, self.dao_info.treasury_id)
+ assert tail_reconstruction is not None
+ assert tail_reconstruction.get_tree_hash() == cat_tail_hash
+ assert isinstance(self.dao_info.current_treasury_coin, Coin)
+ cat_launcher_coin = Coin(
+ self.dao_info.current_treasury_coin.name(), cat_launcher.get_tree_hash(), mint_amount
+ )
+ full_puz = construct_cat_puzzle(CAT_MOD, cat_tail_hash, eve_puzzle)
+
+ solution = Program.to(
+ [
+ treasury_inner_puzhash,
+ self.dao_info.current_treasury_coin.parent_coin_info,
+ full_puz.get_tree_hash(),
+ mint_amount,
+ ]
+ )
+ coin_spends.append(CoinSpend(cat_launcher_coin, cat_launcher, solution))
+ eve_coin = Coin(cat_launcher_coin.name(), full_puz.get_tree_hash(), mint_amount)
+ tail_solution = Program.to([cat_launcher_coin.parent_coin_info, cat_launcher_coin.amount])
+ solution = Program.to([mint_amount, tail_reconstruction, tail_solution])
+ new_spendable_cat = SpendableCAT(
+ eve_coin,
+ cat_tail_hash,
+ eve_puzzle,
+ solution,
+ )
+ if cat_spend_bundle is None:
+ cat_spend_bundle = unsigned_spend_bundle_for_spendable_cats(
+ CAT_MOD, [new_spendable_cat]
+ )
+ else: # pragma: no cover
+ cat_spend_bundle = cat_spend_bundle.aggregate(
+ [
+ cat_spend_bundle,
+ unsigned_spend_bundle_for_spendable_cats(CAT_MOD, [new_spendable_cat]),
+ ]
+ )
+
+ for condition_statement in CONDITIONS.as_iter():
+ if condition_statement.first().as_int() == 51:
+ sum += condition_statement.rest().rest().first().as_int()
+ if sum > 0:
+ xch_coins = await self.select_coins_for_asset_type(uint64(sum))
+ for xch_coin in xch_coins:
+ xch_parent_amount_list.append([xch_coin.parent_coin_info, xch_coin.amount])
+ solution = Program.to(
+ [
+ 0,
+ treasury_inner_puzhash,
+ 0,
+ 0,
+ xch_coin.name(),
+ ]
+ )
+ coin_spends.append(CoinSpend(xch_coin, p2_singleton_puzzle, solution))
+ delegated_puzzle_sb = SpendBundle(coin_spends, AugSchemeMPL.aggregate([]))
+ for tail_hash_conditions_pair in LIST_OF_TAILHASH_CONDITIONS.as_iter():
+ tail_hash: bytes32 = tail_hash_conditions_pair.first().as_atom()
+ conditions: Program = tail_hash_conditions_pair.rest().first()
+ sum_of_conditions = 0
+ sum_of_coins = 0
+ spendable_cat_list = []
+ for condition in conditions.as_iter():
+ if condition.first().as_int() == 51:
+ sum_of_conditions += condition.rest().rest().first().as_int()
+ cat_coins = await self.select_coins_for_asset_type(uint64(sum_of_conditions), tail_hash)
+ parent_amount_list = []
+ for cat_coin in cat_coins:
+ sum_of_coins += cat_coin.amount
+ parent_amount_list.append([cat_coin.parent_coin_info, cat_coin.amount])
+ lineage_proof = await self.fetch_cat_lineage_proof(cat_coin)
+ if cat_coin == cat_coins[-1]: # the last coin is the one that makes the conditions
+ if sum_of_coins - sum_of_conditions > 0:
+ p2_singleton_puzhash = p2_singleton_puzzle.get_tree_hash()
+ change_condition = Program.to(
+ [
+ 51,
+ p2_singleton_puzhash,
+ sum_of_coins - sum_of_conditions,
+ [p2_singleton_puzhash],
+ ]
+ )
+ delegated_puzzle = Program.to((1, change_condition.cons(conditions)))
+ else: # pragma: no cover
+ delegated_puzzle = Program.to((1, conditions))
+
+ solution = Program.to(
+ [
+ 0,
+ treasury_inner_puzhash,
+ delegated_puzzle,
+ 0,
+ cat_coin.name(),
+ ]
+ )
+ else:
+ solution = Program.to(
+ [
+ 0,
+ treasury_inner_puzhash,
+ 0,
+ 0,
+ cat_coin.name(),
+ ]
+ )
+ new_spendable_cat = SpendableCAT(
+ cat_coin,
+ tail_hash,
+ p2_singleton_puzzle,
+ solution,
+ lineage_proof=lineage_proof,
+ )
+ spendable_cat_list.append(new_spendable_cat)
+ # create or merge with other CAT spends
+ if cat_spend_bundle is None:
+ cat_spend_bundle = unsigned_spend_bundle_for_spendable_cats(CAT_MOD, spendable_cat_list)
+ else:
+ cat_spend_bundle = cat_spend_bundle.aggregate(
+ [cat_spend_bundle, unsigned_spend_bundle_for_spendable_cats(CAT_MOD, spendable_cat_list)]
+ )
+ tailhash_parent_amount_list.append([tail_hash, parent_amount_list])
+
+ delegated_solution = Program.to(
+ [
+ xch_parent_amount_list,
+ tailhash_parent_amount_list,
+ treasury_inner_puzhash,
+ ]
+ )
+
+ elif proposal_type == ProposalType.UPDATE:
+ (
+ TREASURY_MOD_HASH,
+ VALIDATOR_MOD_HASH,
+ SINGLETON_STRUCT,
+ PROPOSAL_SELF_HASH,
+ PROPOSAL_MINIMUM_AMOUNT,
+ PROPOSAL_EXCESS_PAYOUT_PUZHASH,
+ PROPOSAL_LENGTH,
+ PROPOSAL_SOFTCLOSE_LENGTH,
+ ATTENDANCE_REQUIRED,
+ PASS_MARGIN,
+ PROPOSAL_SELF_DESTRUCT_TIME,
+ ORACLE_SPEND_DELAY,
+ ) = curried_args.as_iter()
+ coin_spends = []
+ treasury_inner_puzhash = self.dao_info.current_treasury_innerpuz.get_tree_hash()
+ delegated_solution = Program.to([])
+
+ treasury_solution = Program.to(
+ [
+ [proposal_info.current_coin.name(), PROPOSED_PUZ_HASH.as_atom(), 0],
+ validator_solution,
+ puzzle_reveal,
+ delegated_solution,
+ ]
+ )
+ else:
+ treasury_solution = Program.to([0, 0, 0, 0, 0, 0])
+
+ assert self.dao_info.current_treasury_coin is not None
+ parent_info = self.get_parent_for_coin(self.dao_info.current_treasury_coin)
+ assert parent_info is not None
+ full_treasury_solution = Program.to(
+ [
+ [
+ parent_info.parent_name,
+ parent_info.inner_puzzle_hash,
+ parent_info.amount,
+ ],
+ self.dao_info.current_treasury_coin.amount,
+ treasury_solution,
+ ]
+ )
+
+ treasury_cs = CoinSpend(self.dao_info.current_treasury_coin, full_treasury_puz, full_treasury_solution)
+
+ if self_destruct:
+ spend_bundle = SpendBundle([proposal_cs, treasury_cs], AugSchemeMPL.aggregate([]))
+ else:
+ spend_bundle = SpendBundle([proposal_cs, timer_cs, treasury_cs], AugSchemeMPL.aggregate([]))
+ if fee > 0:
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(fee, tx_config)
+ assert chia_tx.spend_bundle is not None
+ full_spend = SpendBundle.aggregate([spend_bundle, chia_tx.spend_bundle])
+ else:
+ full_spend = SpendBundle.aggregate([spend_bundle])
+ if cat_spend_bundle is not None:
+ full_spend = full_spend.aggregate([full_spend, cat_spend_bundle])
+ if delegated_puzzle_sb is not None:
+ full_spend = full_spend.aggregate([full_spend, delegated_puzzle_sb])
+
+ record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=get_finished_state_puzzle(proposal_info.proposal_id).get_tree_hash(),
+ amount=uint64(1),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=full_spend,
+ additions=full_spend.additions(),
+ removals=full_spend.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+ return record
+
+ async def fetch_proposed_puzzle_reveal(self, proposal_id: bytes32) -> Program:
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ # The proposal_id is the launcher coin ID, so its child is the eve coin, and the eve spend contains the reveal
+ children = await wallet_node.fetch_children(proposal_id, peer)
+ eve_state = children[0]
+
+ eve_spend = await fetch_coin_spend(eve_state.created_height, eve_state.coin, peer)
+ puzzle_reveal = get_proposed_puzzle_reveal_from_solution(eve_spend.solution.to_program())
+ return puzzle_reveal
+
+ async def fetch_cat_lineage_proof(self, cat_coin: Coin) -> LineageProof:
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ state = await wallet_node.get_coin_state([cat_coin.parent_coin_info], peer)
+ assert state is not None
+        # CoinState contains Coin, spent_height, and created_height.
+ parent_spend = await fetch_coin_spend(state[0].spent_height, state[0].coin, peer)
+ parent_inner_puz = get_innerpuzzle_from_cat_puzzle(parent_spend.puzzle_reveal.to_program())
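+        # a CAT lineage proof describes the parent coin: (its parent coin id, its inner puzzle hash, its amount)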
+ return LineageProof(state[0].coin.parent_coin_info, parent_inner_puz.get_tree_hash(), state[0].coin.amount)
+
+ async def _create_treasury_fund_transaction(
+ self,
+ funding_wallet: WalletProtocol[Any],
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> List[TransactionRecord]:
+ if funding_wallet.type() == WalletType.STANDARD_WALLET.value:
+ p2_singleton_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=None)
+ wallet: Wallet = funding_wallet # type: ignore[assignment]
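+            # the p2_singleton puzzle hash is used both as the destination and as a memo (hint),
+            # so the DAO wallet can later recognise the funding coin from the hint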
+ return await wallet.generate_signed_transaction(
+ amount,
+ p2_singleton_puzhash,
+ tx_config,
+ fee=fee,
+ memos=[p2_singleton_puzhash],
+ )
+ elif funding_wallet.type() == WalletType.CAT.value:
+ cat_wallet: CATWallet = funding_wallet # type: ignore[assignment]
+ # generate_signed_transaction has a different type signature in Wallet and CATWallet
+ # CATWallet uses a List of amounts and a List of puzhashes as the first two arguments
+ p2_singleton_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id)
+ tx_records: List[TransactionRecord] = await cat_wallet.generate_signed_transaction(
+ [amount],
+ [p2_singleton_puzhash],
+ tx_config,
+ fee=fee,
+ extra_conditions=extra_conditions,
+ )
+ return tx_records
+ else: # pragma: no cover
+ raise ValueError(f"Assets of type {funding_wallet.type()} are not currently supported.")
+
+ async def create_add_funds_to_treasury_spend(
+ self,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ funding_wallet_id: uint32 = uint32(1),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ # set up the p2_singleton
+ funding_wallet = self.wallet_state_manager.wallets[funding_wallet_id]
+ tx_record = await self._create_treasury_fund_transaction(
+ funding_wallet, amount, tx_config, fee, extra_conditions=extra_conditions
+ )
+ return tx_record[0]
+
+ async def fetch_singleton_lineage_proof(self, coin: Coin) -> LineageProof:
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ state = await wallet_node.get_coin_state([coin.parent_coin_info], peer)
+ assert state is not None
+        # CoinState contains Coin, spent_height, and created_height.
+ parent_spend = await fetch_coin_spend(state[0].spent_height, state[0].coin, peer)
+ parent_inner_puz = get_inner_puzzle_from_singleton(parent_spend.puzzle_reveal.to_program())
+ assert isinstance(parent_inner_puz, Program)
+ return LineageProof(state[0].coin.parent_coin_info, parent_inner_puz.get_tree_hash(), state[0].coin.amount)
+
+ async def free_coins_from_finished_proposals(
+ self,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ extra_conditions: Tuple[Condition, ...] = tuple(),
+ ) -> TransactionRecord:
+ dao_cat_wallet: DAOCATWallet = self.wallet_state_manager.wallets[self.dao_info.dao_cat_wallet_id]
+ full_spend = None
+ spends = []
+ closed_list = []
+ finished_puz = None
+ for proposal_info in self.dao_info.proposals_list:
+ if proposal_info.closed:
+ closed_list.append(proposal_info.proposal_id)
+ inner_solution = Program.to(
+ [
+ proposal_info.current_coin.amount,
+ ]
+ )
+ lineage_proof: LineageProof = await self.fetch_singleton_lineage_proof(proposal_info.current_coin)
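+                # standard singleton solution: (lineage_proof, my_amount, inner_solution)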
+ solution = Program.to([lineage_proof.to_program(), proposal_info.current_coin.amount, inner_solution])
+ finished_puz = get_finished_state_puzzle(proposal_info.proposal_id)
+ cs = CoinSpend(proposal_info.current_coin, finished_puz, solution)
+ prop_sb = SpendBundle([cs], AugSchemeMPL.aggregate([]))
+ spends.append(prop_sb)
+
+ sb = await dao_cat_wallet.remove_active_proposal(closed_list, tx_config=tx_config)
+ spends.append(sb)
+
+ if not spends: # pragma: no cover
+ raise ValueError("No proposals are available for release")
+
+ full_spend = SpendBundle.aggregate(spends)
+ if fee > 0:
+ chia_tx = await self.standard_wallet.create_tandem_xch_tx(fee, tx_config)
+ assert chia_tx.spend_bundle is not None
+ full_spend = full_spend.aggregate([full_spend, chia_tx.spend_bundle])
+
+ assert isinstance(finished_puz, Program)
+ record = TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=finished_puz.get_tree_hash(),
+ amount=uint64(1),
+ fee_amount=fee,
+ confirmed=False,
+ sent=uint32(10),
+ spend_bundle=full_spend,
+ additions=full_spend.additions(),
+ removals=full_spend.removals(),
+ wallet_id=self.id(),
+ sent_to=[],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(extra_conditions),
+ )
+ return record
+
+ async def parse_proposal(self, proposal_id: bytes32) -> Dict[str, Any]:
+ for prop_info in self.dao_info.proposals_list:
+ if prop_info.proposal_id == proposal_id:
+ state = await self.get_proposal_state(proposal_id)
+ proposed_puzzle_reveal = await self.fetch_proposed_puzzle_reveal(proposal_id)
+ proposal_type, curried_args = get_proposal_args(proposed_puzzle_reveal)
+ if proposal_type == ProposalType.SPEND:
+ cat_launcher = create_cat_launcher_for_singleton_id(self.dao_info.treasury_id)
+ (
+ TREASURY_SINGLETON_STRUCT,
+ CAT_MOD_HASH,
+ CONDITIONS,
+ LIST_OF_TAILHASH_CONDITIONS,
+ P2_SINGLETON_VIA_DELEGATED_PUZZLE_PUZHASH,
+ ) = curried_args.as_iter()
+ mint_amount = None
+ new_cat_puzhash = None
+ xch_created_coins = []
+ for cond in CONDITIONS.as_iter():
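+                        # condition opcode 51 is CREATE_COIN; a payment to the CAT launcher is a mint,
+                        # any other CREATE_COIN is recorded as a plain XCH output of the proposal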
+ if cond.first().as_int() == 51:
+ if cond.rest().first().as_atom() == cat_launcher.get_tree_hash():
+ mint_amount = cond.rest().rest().first().as_int()
+ new_cat_puzhash = cond.rest().rest().rest().first().first().as_atom()
+ else:
+ cc = {"puzzle_hash": cond.at("rf").as_atom(), "amount": cond.at("rrf").as_int()}
+ xch_created_coins.append(cc)
+
+ asset_create_coins: List[Dict[Any, Any]] = []
+ for asset in LIST_OF_TAILHASH_CONDITIONS.as_iter():
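+                        # each entry is (asset_id . conditions); collect the CREATE_COIN payouts per CAT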
+ if asset == Program.to(0): # pragma: no cover
+ asset_dict: Optional[Dict[str, Any]] = None
+ else:
+ asset_id = asset.first().as_atom()
+ cc_list = []
+ for cond in asset.rest().first().as_iter():
+ if cond.first().as_int() == 51:
+ asset_dict = {
+ "puzzle_hash": cond.at("rf").as_atom(),
+ "amount": cond.at("rrf").as_int(),
+ }
+ # cc_list.append([asset_id, asset_dict])
+ cc_list.append(asset_dict)
+ asset_create_coins.append({"asset_id": asset_id, "conditions": cc_list})
+ dictionary: Dict[str, Any] = {
+ "state": state,
+ "proposal_type": proposal_type.value,
+ "proposed_puzzle_reveal": proposed_puzzle_reveal,
+ "xch_conditions": xch_created_coins,
+ "asset_conditions": asset_create_coins,
+ }
+ if mint_amount is not None and new_cat_puzhash is not None:
+ dictionary["mint_amount"] = mint_amount
+ dictionary["new_cat_puzhash"] = new_cat_puzhash
+ elif proposal_type == ProposalType.UPDATE:
+ dao_rules = get_dao_rules_from_update_proposal(proposed_puzzle_reveal)
+ dictionary = {
+ "state": state,
+ "proposal_type": proposal_type.value,
+ "dao_rules": dao_rules,
+ }
+ return dictionary
+ raise ValueError(f"Unable to find proposal with id: {proposal_id.hex()}") # pragma: no cover
+
+ async def add_parent(self, name: bytes32, parent: Optional[LineageProof]) -> None:
+ self.log.info(f"Adding parent {name}: {parent}")
+ current_list = self.dao_info.parent_info.copy()
+ current_list.append((name, parent))
+ dao_info: DAOInfo = DAOInfo(
+ self.dao_info.treasury_id,
+ self.dao_info.cat_wallet_id,
+ self.dao_info.dao_cat_wallet_id,
+ self.dao_info.proposals_list,
+ current_list,
+ self.dao_info.current_treasury_coin,
+ self.dao_info.current_treasury_innerpuz,
+ self.dao_info.singleton_block_height,
+ self.dao_info.filter_below_vote_amount,
+ self.dao_info.assets,
+ self.dao_info.current_height,
+ )
+ await self.save_info(dao_info)
+
+ async def save_info(self, dao_info: DAOInfo) -> None:
+ self.dao_info = dao_info
+ current_info = self.wallet_info
+ data_str = json.dumps(dao_info.to_json_dict())
+ wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
+ self.wallet_info = wallet_info
+ await self.wallet_state_manager.user_store.update_wallet(wallet_info)
+
+ def generate_wallet_name(self) -> str:
+ """
+ Generate a new DAO wallet name
+ :return: wallet name
+ """
+ max_num = 0
+ for wallet in self.wallet_state_manager.wallets.values():
+ if wallet.type() == WalletType.DAO: # pragma: no cover
+ matched = re.search(r"^Profile (\d+)$", wallet.wallet_info.name) # TODO: bug: wallet.wallet_info
+ if matched and int(matched.group(1)) > max_num:
+ max_num = int(matched.group(1))
+ return f"Profile {max_num + 1}"
+
+ def require_derivation_paths(self) -> bool:
+ return False
+
+ def get_cat_wallet_id(self) -> uint32:
+ return self.dao_info.cat_wallet_id
+
+ async def enter_dao_cat_voting_mode(
+ self,
+ amount: uint64,
+ tx_config: TXConfig,
+ ) -> List[TransactionRecord]:
+ dao_cat_wallet: DAOCATWallet = self.wallet_state_manager.wallets[self.dao_info.dao_cat_wallet_id]
+ return await dao_cat_wallet.enter_dao_cat_voting_mode(amount, tx_config)
+
+ @staticmethod
+ def get_next_interesting_coin(spend: CoinSpend) -> Optional[Coin]: # pragma: no cover
+ # CoinSpend of one of the coins that we cared about. This coin was spent in a block, but might be in a reorg
+ # If we return a value, it is a coin that we are also interested in (to support two transitions per block)
+ return get_most_recent_singleton_coin_from_coin_spend(spend)
+
+ async def get_tip(self, singleton_id: bytes32) -> Optional[Tuple[uint32, SingletonRecord]]:
+ ret: List[
+ Tuple[uint32, SingletonRecord]
+ ] = await self.wallet_state_manager.singleton_store.get_records_by_singleton_id(singleton_id)
+ if len(ret) == 0: # pragma: no cover
+ return None
+ return ret[-1]
+
+ async def get_tip_created_height(self, singleton_id: bytes32) -> Optional[int]: # pragma: no cover
+ ret: List[
+ Tuple[uint32, SingletonRecord]
+ ] = await self.wallet_state_manager.singleton_store.get_records_by_singleton_id(singleton_id)
+ if len(ret) < 1:
+ return None
+ assert isinstance(ret[-2], SingletonRecord)
+ return ret[-2].removed_height
+
+ async def add_or_update_proposal_info(
+ self,
+ new_state: CoinSpend,
+ block_height: uint32,
+ ) -> None:
+ new_dao_info = copy.copy(self.dao_info)
+ puzzle = get_inner_puzzle_from_singleton(new_state.puzzle_reveal)
+ if puzzle is None: # pragma: no cover
+ raise ValueError("get_innerpuzzle_from_puzzle failed")
+ solution = (
+ Program.from_bytes(bytes(new_state.solution)).rest().rest().first()
+ ) # get proposal solution from full singleton solution
+ singleton_id = singleton.get_singleton_id_from_puzzle(new_state.puzzle_reveal)
+ if singleton_id is None: # pragma: no cover
+ raise ValueError("get_singleton_id_from_puzzle failed")
+ ended = False
+ dao_rules = get_treasury_rules_from_puzzle(self.dao_info.current_treasury_innerpuz)
+ current_coin = get_most_recent_singleton_coin_from_coin_spend(new_state)
+ if current_coin is None: # pragma: no cover
+ raise ValueError("get_most_recent_singleton_coin_from_coin_spend failed")
+
+ current_innerpuz = get_new_puzzle_from_proposal_solution(puzzle, solution)
+ assert isinstance(current_innerpuz, Program)
+ assert current_coin.puzzle_hash == curry_singleton(singleton_id, current_innerpuz).get_tree_hash()
+ # check if our parent puzzle was the finished state
+ if puzzle.uncurry()[0] == DAO_FINISHED_STATE:
+ ended = True
+ index = 0
+ for current_info in new_dao_info.proposals_list:
+ # Search for current proposal_info
+ if current_info.proposal_id == singleton_id:
+ new_proposal_info = ProposalInfo(
+ singleton_id,
+ puzzle,
+ current_info.amount_voted,
+ current_info.yes_votes,
+ current_coin,
+ current_innerpuz,
+ current_info.timer_coin,
+ block_height,
+ current_info.passed,
+ ended,
+ )
+ new_dao_info.proposals_list[index] = new_proposal_info
+ await self.save_info(new_dao_info)
+ future_parent = LineageProof(
+ new_state.coin.parent_coin_info,
+ puzzle.get_tree_hash(),
+ uint64(new_state.coin.amount),
+ )
+ await self.add_parent(new_state.coin.name(), future_parent)
+ return
+
+ # check if we are the finished state
+ if current_innerpuz == get_finished_state_inner_puzzle(singleton_id):
+ ended = True
+
+ c_a, curried_args = uncurry_proposal(puzzle)
+ (
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_PUZHASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ _DAO_TREASURY_MOD_HASH,
+ lockup_self_hash,
+ cat_tail_hash,
+ treasury_id,
+ ) = curried_args.as_iter()
+ (
+ curry_one,
+ proposal_id,
+ proposed_puzzle_hash,
+ yes_votes,
+ total_votes,
+ ) = c_a.as_iter()
+
+ if current_coin is None: # pragma: no cover
+ raise RuntimeError("get_most_recent_singleton_coin_from_coin_spend({new_state}) failed")
+
+ timer_coin = None
+ if solution.at("rrrrrrf").as_int() == 0:
+ # we need to add the vote amounts from the solution to get accurate totals
+ is_yes_vote = solution.at("rf").as_int()
+ votes_added = 0
+ for vote_amount in solution.first().as_iter():
+ votes_added += vote_amount.as_int()
+ else:
+ # If we have entered the finished state
+ # TODO: we need to alert the user that they can free up their coins
+ is_yes_vote = 0
+ votes_added = 0
+
+ if current_coin.amount < dao_rules.proposal_minimum_amount and not ended: # pragma: no cover
+ raise ValueError("this coin does not meet the minimum requirements and can be ignored")
+ new_total_votes = total_votes.as_int() + votes_added
+ if new_total_votes < self.dao_info.filter_below_vote_amount: # pragma: no cover
+ return # ignore all proposals below the filter amount
+
+ if is_yes_vote == 1:
+ new_yes_votes = yes_votes.as_int() + votes_added
+ else:
+ new_yes_votes = yes_votes.as_int()
+
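+        # pass_percentage is measured out of 10000 (basis points)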
+ required_yes_votes = (self.dao_rules.attendance_required * self.dao_rules.pass_percentage) // 10000
+ yes_votes_needed = max(0, required_yes_votes - new_yes_votes)
+
+ passed = True if yes_votes_needed == 0 else False
+
+ index = 0
+ for current_info in new_dao_info.proposals_list:
+ # Search for current proposal_info
+ if current_info.proposal_id == singleton_id:
+ # If we are receiving a voting spend update
+ new_proposal_info = ProposalInfo(
+ singleton_id,
+ puzzle,
+ new_total_votes,
+ new_yes_votes,
+ current_coin,
+ current_innerpuz,
+ current_info.timer_coin,
+ block_height,
+ passed,
+ ended,
+ )
+ new_dao_info.proposals_list[index] = new_proposal_info
+ await self.save_info(new_dao_info)
+ future_parent = LineageProof(
+ new_state.coin.parent_coin_info,
+ puzzle.get_tree_hash(),
+ uint64(new_state.coin.amount),
+ )
+ await self.add_parent(new_state.coin.name(), future_parent)
+ return
+ index = index + 1
+
+ # Search for the timer coin
+ if not ended:
+ wallet_node: Any = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ children = await wallet_node.fetch_children(singleton_id, peer)
+ assert len(children) > 0
+ found = False
+ parent_coin_id = singleton_id
+
+ if self.dao_info.current_treasury_innerpuz is None: # pragma: no cover
+ raise ValueError("self.dao_info.current_treasury_innerpuz is None")
+
+ timer_coin_puzhash = get_proposal_timer_puzzle(
+ cat_tail_hash.as_atom(),
+ singleton_id,
+ self.dao_info.treasury_id,
+ ).get_tree_hash()
+
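+            # walk down the chain of odd-amount (singleton) children, checking each generation's
+            # siblings for the proposal timer coin's puzzle hash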
+ while not found and len(children) > 0:
+ children = await wallet_node.fetch_children(parent_coin_id, peer)
+ if len(children) == 0: # pragma: no cover
+ break
+ children_state = [child for child in children if child.coin.amount % 2 == 1]
+ assert children_state is not None
+ assert len(children_state) > 0
+ child_state = children_state[0]
+ for child in children:
+ if child.coin.puzzle_hash == timer_coin_puzhash:
+ found = True
+ timer_coin = child.coin
+ break
+ child_coin = child_state.coin
+ parent_coin_id = child_coin.name()
+
+ # If we reach here then we don't currently know about this coin
+ new_proposal_info = ProposalInfo(
+ singleton_id,
+ puzzle,
+ uint64(new_total_votes),
+ uint64(new_yes_votes),
+ current_coin,
+ current_innerpuz,
+ timer_coin, # if this is None then the proposal has finished
+ block_height, # block height that current proposal singleton coin was created
+ passed,
+ ended,
+ )
+ new_dao_info.proposals_list.append(new_proposal_info)
+ await self.save_info(new_dao_info)
+ future_parent = LineageProof(
+ new_state.coin.parent_coin_info,
+ puzzle.get_tree_hash(),
+ uint64(new_state.coin.amount),
+ )
+ await self.add_parent(new_state.coin.name(), future_parent)
+ return
+
+ async def update_closed_proposal_coin(self, new_state: CoinSpend, block_height: uint32) -> None:
+ new_dao_info = copy.copy(self.dao_info)
+ puzzle = get_inner_puzzle_from_singleton(new_state.puzzle_reveal)
+ proposal_id = singleton.get_singleton_id_from_puzzle(new_state.puzzle_reveal)
+ current_coin = get_most_recent_singleton_coin_from_coin_spend(new_state)
+ index = 0
+ for pi in self.dao_info.proposals_list:
+ if pi.proposal_id == proposal_id:
+ assert isinstance(current_coin, Coin)
+ new_info = ProposalInfo(
+ proposal_id,
+ pi.inner_puzzle,
+ pi.amount_voted,
+ pi.yes_votes,
+ current_coin,
+ pi.current_innerpuz,
+ pi.timer_coin,
+ pi.singleton_block_height,
+ pi.passed,
+ pi.closed,
+ )
+ new_dao_info.proposals_list[index] = new_info
+ await self.save_info(new_dao_info)
+ assert isinstance(puzzle, Program)
+ future_parent = LineageProof(
+ new_state.coin.parent_coin_info,
+ puzzle.get_tree_hash(),
+ uint64(new_state.coin.amount),
+ )
+ await self.add_parent(new_state.coin.name(), future_parent)
+ return
+ index = index + 1
+
+ async def get_proposal_state(self, proposal_id: bytes32) -> Dict[str, Union[int, bool]]:
+ """
+ Use this to figure out whether a proposal has passed or failed and whether it can be closed
+ Given a proposal_id:
+        - if the required number of yes votes has been recorded, the proposal has passed.
+        - if the timelock and attendance requirements are met, the proposal can be closed.
+ Returns a dict of passed and closable bools, and the remaining votes/blocks needed
+
+ Note that a proposal can be in a passed and closable state now, but become failed if a large number of
+        'no' votes are received before the soft close is reached.
+ """
+ for prop in self.dao_info.proposals_list:
+ if prop.proposal_id == proposal_id:
+ is_closed = prop.closed
+ break
+ else: # pragma: no cover
+ raise ValueError(f"Proposal not found for id {proposal_id}")
+
+ wallet_node = self.wallet_state_manager.wallet_node
+ peer: WSChiaConnection = wallet_node.get_full_node_peer()
+ if peer is None: # pragma: no cover
+ raise ValueError("Could not find any peers to request puzzle and solution from")
+ assert isinstance(prop.timer_coin, Coin)
+ timer_cs = (await wallet_node.get_coin_state([prop.timer_coin.name()], peer))[0]
+ peak = await self.wallet_state_manager.blockchain.get_peak_block()
+ blocks_elapsed = peak.height - timer_cs.created_height
+
+ required_yes_votes = (self.dao_rules.attendance_required * self.dao_rules.pass_percentage) // 10000
+ total_votes_needed = max(0, self.dao_rules.attendance_required - prop.amount_voted)
+ yes_votes_needed = max(0, required_yes_votes - prop.yes_votes)
+ blocks_needed = max(0, self.dao_rules.proposal_timelock - blocks_elapsed)
+
+ passed = True if yes_votes_needed == 0 else False
+ closable = True if total_votes_needed == blocks_needed == 0 else False
+ proposal_state = {
+ "total_votes_needed": total_votes_needed,
+ "yes_votes_needed": yes_votes_needed,
+ "blocks_needed": blocks_needed,
+ "passed": passed,
+ "closable": closable,
+ "closed": is_closed,
+ }
+ return proposal_state
+
+ async def update_treasury_info(
+ self,
+ new_state: CoinSpend,
+ block_height: uint32,
+ ) -> None:
+ if self.dao_info.singleton_block_height <= block_height:
+ # TODO: what do we do here?
+ # return
+ pass
+ puzzle = get_inner_puzzle_from_singleton(new_state.puzzle_reveal)
+ if puzzle is None: # pragma: no cover
+ raise ValueError("get_innerpuzzle_from_puzzle failed")
+ solution = (
+ Program.from_bytes(bytes(new_state.solution)).rest().rest().first()
+ ) # get proposal solution from full singleton solution
+ new_innerpuz = get_new_puzzle_from_treasury_solution(puzzle, solution)
+ child_coin = get_most_recent_singleton_coin_from_coin_spend(new_state)
+ assert isinstance(child_coin, Coin)
+ assert isinstance(self.dao_info.current_treasury_coin, Coin)
+ if child_coin.puzzle_hash != self.dao_info.current_treasury_coin.puzzle_hash:
+ # update dao rules
+ assert isinstance(new_innerpuz, Program)
+ self.dao_rules = get_treasury_rules_from_puzzle(new_innerpuz)
+ dao_info = dataclasses.replace(
+ self.dao_info,
+ current_treasury_coin=child_coin,
+ current_treasury_innerpuz=new_innerpuz,
+ singleton_block_height=block_height,
+ )
+ await self.save_info(dao_info)
+ future_parent = LineageProof(
+ new_state.coin.parent_coin_info,
+ puzzle.get_tree_hash(),
+ uint64(new_state.coin.amount),
+ )
+ await self.add_parent(new_state.coin.name(), future_parent)
+ return
+
+ async def get_spend_history(self, singleton_id: bytes32) -> List[Tuple[uint32, CoinSpend]]: # pragma: no cover
+ ret: List[
+ Tuple[uint32, CoinSpend]
+ ] = await self.wallet_state_manager.singleton_store.get_records_by_singleton_id(singleton_id)
+ if len(ret) == 0:
+ raise ValueError(f"No records found in singleton store for singleton id {singleton_id}")
+ return ret
+
+ async def apply_state_transition(self, new_state: CoinSpend, block_height: uint32) -> bool:
+ """
+ We are being notified of a singleton state transition. A Singleton has been spent.
+        Returns True if the spend was applied as a valid transition; invalid or unsupported spends raise an error.
+ """
+
+ self.log.info(
+ f"DAOWallet.apply_state_transition called with the height: {block_height} and CoinSpend of {new_state.coin.name()}."
+ )
+ singleton_id = get_singleton_id_from_puzzle(new_state.puzzle_reveal)
+ if not singleton_id: # pragma: no cover
+ raise ValueError("Received a non singleton coin for dao wallet")
+ tip: Optional[Tuple[uint32, SingletonRecord]] = await self.get_tip(singleton_id)
+ if tip is None: # pragma: no cover
+ # this is our first time, just store it
+ await self.wallet_state_manager.singleton_store.add_spend(self.wallet_id, new_state, block_height)
+ else:
+ assert isinstance(tip, SingletonRecord)
+ tip_spend = tip.parent_coinspend
+
+ tip_coin: Optional[Coin] = get_most_recent_singleton_coin_from_coin_spend(tip_spend)
+ assert tip_coin is not None
+ # TODO: Add check for pending transaction on our behalf in here
+ # if we have pending transaction that is now invalidated, then:
+ # check if we should auto re-create spend or flash error to use (should we have a failed tx db?)
+ await self.wallet_state_manager.singleton_store.add_spend(self.id(), new_state, block_height)
+
+ # Consume new DAOBlockchainInfo
+ # Determine if this is a treasury spend or a proposal spend
+ puzzle = get_inner_puzzle_from_singleton(new_state.puzzle_reveal)
+ assert puzzle
+ try:
+ mod, curried_args = puzzle.uncurry()
+ except ValueError as e: # pragma: no cover
+ self.log.warning("Cannot uncurry puzzle in DAO Wallet: error: %s", e)
+ raise e
+ if mod == DAO_TREASURY_MOD:
+ await self.update_treasury_info(new_state, block_height)
+ elif (mod == DAO_PROPOSAL_MOD) or (mod.uncurry()[0] == DAO_PROPOSAL_MOD):
+ await self.add_or_update_proposal_info(new_state, block_height)
+ elif mod == DAO_FINISHED_STATE:
+ await self.update_closed_proposal_coin(new_state, block_height)
+ else: # pragma: no cover
+ raise ValueError(f"Unsupported spend in DAO Wallet: {self.id()}")
+
+ return True
diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py
--- a/chia/wallet/did_wallet/did_wallet.py
+++ b/chia/wallet/did_wallet/did_wallet.py
@@ -235,7 +235,7 @@ async def create_new_did_wallet_from_coin_spend(
None,
None,
False,
- json.dumps(did_wallet_puzzles.program_to_metadata(metadata)),
+ json.dumps(did_wallet_puzzles.did_program_to_metadata(metadata)),
)
self.check_existed_did()
info_as_string = json.dumps(self.did_info.to_json_dict())
@@ -404,7 +404,7 @@ async def coin_added(self, coin: Coin, _: uint32, peer: WSChiaConnection, parent
None,
None,
False,
- json.dumps(did_wallet_puzzles.program_to_metadata(did_data.metadata)),
+ json.dumps(did_wallet_puzzles.did_program_to_metadata(did_data.metadata)),
)
await self.save_info(new_info)
diff --git a/chia/wallet/did_wallet/did_wallet_puzzles.py b/chia/wallet/did_wallet/did_wallet_puzzles.py
--- a/chia/wallet/did_wallet/did_wallet_puzzles.py
+++ b/chia/wallet/did_wallet/did_wallet_puzzles.py
@@ -191,7 +191,7 @@ def metadata_to_program(metadata: Dict) -> Program:
return Program.to(kv_list)
-def program_to_metadata(program: Program) -> Dict:
+def did_program_to_metadata(program: Program) -> Dict:
"""
Convert a program to a metadata dict
:param program: Chialisp program contains the metadata
diff --git a/chia/wallet/nft_wallet/nft_puzzles.py b/chia/wallet/nft_wallet/nft_puzzles.py
--- a/chia/wallet/nft_wallet/nft_puzzles.py
+++ b/chia/wallet/nft_wallet/nft_puzzles.py
@@ -155,7 +155,7 @@ def metadata_to_program(metadata: Dict[bytes, Any]) -> Program:
return program
-def program_to_metadata(program: Program) -> Dict[bytes, Any]:
+def nft_program_to_metadata(program: Program) -> Dict[bytes, Any]:
"""
Convert a program to a metadata dict
:param program: Chialisp program contains the metadata
@@ -190,7 +190,7 @@ def update_metadata(metadata: Program, update_condition: Program) -> Program:
:param update_condition: Update metadata conditions
:return: Updated metadata
"""
- new_metadata: Dict[bytes, Any] = program_to_metadata(metadata)
+ new_metadata: Dict[bytes, Any] = nft_program_to_metadata(metadata)
uri: Program = update_condition.rest().rest().first()
prepend_value(uri.first().as_python(), uri.rest(), new_metadata)
return metadata_to_program(new_metadata)
diff --git a/chia/wallet/puzzles/tails.py b/chia/wallet/puzzles/tails.py
--- a/chia/wallet/puzzles/tails.py
+++ b/chia/wallet/puzzles/tails.py
@@ -2,6 +2,8 @@
from typing import Any, Dict, List, Optional, Tuple
+from chia_rs import Coin
+
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.spend_bundle import SpendBundle
@@ -15,6 +17,7 @@
unsigned_spend_bundle_for_spendable_cats,
)
from chia.wallet.cat_wallet.lineage_store import CATLineageStore
+from chia.wallet.dao_wallet.dao_utils import create_cat_launcher_for_singleton_id
from chia.wallet.lineage_proof import LineageProof
from chia.wallet.payment import Payment
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
@@ -33,6 +36,9 @@
DELEGATED_LIMITATIONS_MOD = load_clvm_maybe_recompile(
"delegated_tail.clsp", package_or_requirement="chia.wallet.cat_wallet.puzzles"
)
+GENESIS_BY_ID_OR_SINGLETON_MOD = load_clvm_maybe_recompile(
+ "genesis_by_coin_id_or_singleton.clsp", package_or_requirement="chia.wallet.cat_wallet.puzzles"
+)
class LimitationsProgram:
@@ -202,6 +208,93 @@ def solve(args: List[Program], solution_dict: Dict) -> Program:
)
+class GenesisByIdOrSingleton(LimitationsProgram):
+ """
+    This TAIL allows issuance either from a specific genesis coin ID or by the DAO treasury singleton's CAT launcher.
+ """
+
+ @staticmethod
+ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: # pragma: no cover
+ if uncurried_mod == GENESIS_BY_ID_OR_SINGLETON_MOD:
+ genesis_id = curried_args.first()
+ return True, [genesis_id.as_atom()]
+ else:
+ return False, []
+
+ @staticmethod
+ def construct(args: List[Program]) -> Program:
+ return GENESIS_BY_ID_OR_SINGLETON_MOD.curry(
+ args[0],
+ args[1],
+ )
+
+ @staticmethod
+ def solve(args: List[Program], solution_dict: Dict) -> Program: # pragma: no cover
+ pid = hexstr_to_bytes(solution_dict["parent_coin_info"])
+ return Program.to([pid, solution_dict["amount"]])
+
+ @classmethod
+ async def generate_issuance_bundle(
+ cls, wallet, tail_info: Dict, amount: uint64, tx_config: TXConfig, fee: uint64 = uint64(0)
+ ) -> Tuple[TransactionRecord, SpendBundle]:
+ if "coins" in tail_info:
+ coins: List[Coin] = tail_info["coins"]
+ origin_id = coins.copy().pop().name()
+ else: # pragma: no cover
+ coins = await wallet.standard_wallet.select_coins(amount + fee, tx_config.coin_selection_config)
+ origin = coins.copy().pop()
+ origin_id = origin.name()
+
+ cat_inner: Program = await wallet.get_new_inner_puzzle()
+ # GENESIS_ID
+ # TREASURY_SINGLETON_STRUCT ; (SINGLETON_MOD_HASH, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH))
+ launcher_puzhash = create_cat_launcher_for_singleton_id(tail_info["treasury_id"]).get_tree_hash()
+ tail: Program = cls.construct(
+ [
+ Program.to(origin_id),
+ Program.to(launcher_puzhash),
+ ]
+ )
+
+ wallet.lineage_store = await CATLineageStore.create(
+ wallet.wallet_state_manager.db_wrapper, tail.get_tree_hash().hex()
+ )
+ await wallet.add_lineage(origin_id, LineageProof())
+
+ minted_cat_puzzle_hash: bytes32 = construct_cat_puzzle(CAT_MOD, tail.get_tree_hash(), cat_inner).get_tree_hash()
+
+ tx_records: List[TransactionRecord] = await wallet.standard_wallet.generate_signed_transaction(
+ amount, minted_cat_puzzle_hash, tx_config, fee, coins=set(coins), origin_id=origin_id
+ )
+ tx_record: TransactionRecord = tx_records[0]
+ assert tx_record.spend_bundle is not None
+ payment = Payment(cat_inner.get_tree_hash(), amount)
+ inner_solution = wallet.standard_wallet.add_condition_to_solution(
+ Program.to([51, 0, -113, tail, []]),
+ wallet.standard_wallet.make_solution(
+ primaries=[payment],
+ ),
+ )
+ eve_spend = unsigned_spend_bundle_for_spendable_cats(
+ CAT_MOD,
+ [
+ SpendableCAT(
+ list(filter(lambda a: a.amount == amount, tx_record.additions))[0],
+ tail.get_tree_hash(),
+ cat_inner,
+ inner_solution,
+ limitations_program_reveal=tail,
+ )
+ ],
+ )
+ signed_eve_spend = await wallet.sign(eve_spend)
+
+ if wallet.cat_info.my_tail is None:
+ await wallet.save_info(CATInfo(tail.get_tree_hash(), tail))
+
+ return tx_record, SpendBundle.aggregate([tx_record.spend_bundle, signed_eve_spend])
+
+
# This should probably be much more elegant than just a dictionary with strings as identifiers
# Right now this is small and experimental so it can stay like this
ALL_LIMITATIONS_PROGRAMS: Dict[str, Any] = {
@@ -209,6 +302,7 @@ def solve(args: List[Program], solution_dict: Dict) -> Program:
"genesis_by_puzhash": GenesisByPuzhash,
"everything_with_signature": EverythingWithSig,
"delegated_limitations": DelegatedLimitations,
+ "genesis_by_id_or_singleton": GenesisByIdOrSingleton,
}
diff --git a/chia/wallet/singleton.py b/chia/wallet/singleton.py
--- a/chia/wallet/singleton.py
+++ b/chia/wallet/singleton.py
@@ -1,9 +1,12 @@
from __future__ import annotations
-from typing import Optional
+from typing import List, Optional, Union
+from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend, compute_additions
from chia.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from chia.wallet.util.curry_and_treehash import calculate_hash_of_quoted_mod_hash, curry_and_treehash
@@ -14,7 +17,7 @@
SINGLETON_LAUNCHER_PUZZLE_HASH = SINGLETON_LAUNCHER_PUZZLE.get_tree_hash()
-def get_inner_puzzle_from_singleton(puzzle: Program) -> Optional[Program]:
+def get_inner_puzzle_from_singleton(puzzle: Union[Program, SerializedProgram]) -> Optional[Program]:
"""
Extract the inner puzzle of a singleton
:param puzzle: Singleton puzzle
@@ -30,7 +33,23 @@ def get_inner_puzzle_from_singleton(puzzle: Program) -> Optional[Program]:
return Program(INNER_PUZZLE)
-def is_singleton(inner_f: Program) -> bool:
+def get_singleton_id_from_puzzle(puzzle: Union[Program, SerializedProgram]) -> Optional[bytes32]:
+ """
+ Extract the singleton ID from a singleton puzzle
+ :param puzzle: Singleton puzzle
+    :return: Singleton launcher ID
+ """
+ r = puzzle.uncurry()
+ if r is None:
+ return None # pragma: no cover
+ inner_f, args = r
+ if not is_singleton(inner_f):
+ return None
+ SINGLETON_STRUCT, INNER_PUZZLE = list(args.as_iter())
+ return bytes32(Program(SINGLETON_STRUCT).rest().first().as_atom())
+
+
+def is_singleton(inner_f: Union[Program, SerializedProgram]) -> bool:
"""
Check if a puzzle is a singleton mod
:param inner_f: puzzle
@@ -52,7 +71,7 @@ def create_singleton_puzzle_hash(innerpuz_hash: bytes32, launcher_id: bytes32) -
return curry_and_treehash(SINGLETON_TOP_LAYER_MOD_HASH_QUOTED, singleton_struct.get_tree_hash(), innerpuz_hash)
-def create_singleton_puzzle(innerpuz: Program, launcher_id: bytes32) -> Program:
+def create_singleton_puzzle(innerpuz: Union[Program, SerializedProgram], launcher_id: bytes32) -> Program:
"""
Create a full Singleton puzzle
:param innerpuz: Singleton inner puzzle
@@ -62,3 +81,16 @@ def create_singleton_puzzle(innerpuz: Program, launcher_id: bytes32) -> Program:
# singleton_struct = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH))
singleton_struct = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (launcher_id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
return SINGLETON_TOP_LAYER_MOD.curry(singleton_struct, innerpuz)
+
+
+def get_most_recent_singleton_coin_from_coin_spend(coin_sol: CoinSpend) -> Optional[Coin]:
+ additions: List[Coin] = compute_additions(coin_sol)
+ for coin in additions:
+ if coin.amount % 2 == 1:
+ return coin
+ return None # pragma: no cover
+
+
+def get_singleton_struct_for_id(id: bytes32) -> Program:
+ singleton_struct: Program = Program.to((SINGLETON_TOP_LAYER_MOD_HASH, (id, SINGLETON_LAUNCHER_PUZZLE_HASH)))
+ return singleton_struct
diff --git a/chia/wallet/singleton_record.py b/chia/wallet/singleton_record.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/singleton_record.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.util.ints import uint32
+from chia.wallet.lineage_proof import LineageProof
+
+
+@dataclass(frozen=True)
+class SingletonRecord:
+ """
+ These are values that correspond to a singleton in the WalletSingletonStore
+ """
+
+ coin: Coin
+ singleton_id: bytes32
+ wallet_id: uint32
+ parent_coinspend: CoinSpend
+ inner_puzzle_hash: Optional[bytes32]
+ pending: bool
+ removed_height: int
+ lineage_proof: LineageProof
+ custom_data: Optional[Any]
+
+ def name(self) -> bytes32: # pragma: no cover
+ return self.coin.name()
diff --git a/chia/wallet/util/wallet_types.py b/chia/wallet/util/wallet_types.py
--- a/chia/wallet/util/wallet_types.py
+++ b/chia/wallet/util/wallet_types.py
@@ -26,6 +26,8 @@ class WalletType(IntEnum):
DATA_LAYER = 11
DATA_LAYER_OFFER = 12
VC = 13
+ DAO = 14
+ DAO_CAT = 15
CRCAT = 57
diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py
--- a/chia/wallet/wallet_node.py
+++ b/chia/wallet/wallet_node.py
@@ -283,6 +283,7 @@ async def reset_sync_db(self, db_path: Union[Path, str], fingerprint: int) -> bo
"trade_record_times",
"tx_times",
"pool_state_transitions",
+ "singletons",
"singleton_records",
"mirrors",
"launchers",
diff --git a/chia/wallet/wallet_singleton_store.py b/chia/wallet/wallet_singleton_store.py
new file mode 100644
--- /dev/null
+++ b/chia/wallet/wallet_singleton_store.py
@@ -0,0 +1,260 @@
+from __future__ import annotations
+
+import json
+import logging
+from sqlite3 import Row
+from typing import List, Optional, Type, TypeVar, Union
+
+from clvm.casts import int_from_bytes
+
+from chia.consensus.default_constants import DEFAULT_CONSTANTS
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.condition_opcodes import ConditionOpcode
+from chia.util.condition_tools import conditions_dict_for_solution
+from chia.util.db_wrapper import DBWrapper2, execute_fetchone
+from chia.util.ints import uint32
+from chia.wallet import singleton
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.singleton import get_inner_puzzle_from_singleton, get_singleton_id_from_puzzle
+from chia.wallet.singleton_record import SingletonRecord
+
+log = logging.getLogger(__name__)
+_T_WalletSingletonStore = TypeVar("_T_WalletSingletonStore", bound="WalletSingletonStore")
+
+
+class WalletSingletonStore:
+ db_wrapper: DBWrapper2
+
+ @classmethod
+ async def create(cls: Type[_T_WalletSingletonStore], wrapper: DBWrapper2) -> _T_WalletSingletonStore:
+ self = cls()
+ self.db_wrapper = wrapper
+
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ await conn.execute(
+ (
+ "CREATE TABLE IF NOT EXISTS singletons("
+ "coin_id blob PRIMARY KEY,"
+ " coin text,"
+ " singleton_id blob,"
+ " wallet_id int,"
+ " parent_coin_spend blob,"
+ " inner_puzzle_hash blob,"
+ " pending tinyint,"
+ " removed_height int,"
+ " lineage_proof blob,"
+ " custom_data blob)"
+ )
+ )
+
+ await conn.execute("CREATE INDEX IF NOT EXISTS removed_height_index on singletons(removed_height)")
+
+ return self
+
+ async def save_singleton(self, record: SingletonRecord) -> None:
+ singleton_id = singleton.get_singleton_id_from_puzzle(record.parent_coinspend.puzzle_reveal)
+ if singleton_id is None: # pragma: no cover
+ raise RuntimeError(
+ "Failed to derive Singleton ID from puzzle reveal in parent spend %s", record.parent_coinspend
+ )
+ pending_int = 0
+ if record.pending:
+ pending_int = 1
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ columns = (
+ "coin_id, coin, singleton_id, wallet_id, parent_coin_spend, inner_puzzle_hash, "
+ "pending, removed_height, lineage_proof, custom_data"
+ )
+ await conn.execute(
+ f"INSERT or REPLACE INTO singletons ({columns}) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ (
+ record.coin.name().hex(),
+ json.dumps(record.coin.to_json_dict()),
+ singleton_id.hex(),
+ record.wallet_id,
+ bytes(record.parent_coinspend),
+ record.inner_puzzle_hash,
+ pending_int,
+ record.removed_height,
+ bytes(record.lineage_proof),
+ record.custom_data,
+ ),
+ )
+
+ async def add_spend(
+ self,
+ wallet_id: uint32,
+ coin_state: CoinSpend,
+ block_height: uint32 = uint32(0),
+ pending: bool = True,
+ ) -> None:
+ """Given a coin spend of a singleton, attempt to calculate the child coin and details
+        for the new singleton record. Add the new record to the store and mark the old record
+        as removed at the given block height, if it exists
+ """
+ # get singleton_id from puzzle_reveal
+ singleton_id = get_singleton_id_from_puzzle(coin_state.puzzle_reveal)
+ if not singleton_id:
+ raise RuntimeError("Coin to add is not a valid singleton")
+
+ # get details for singleton record
+ conditions = conditions_dict_for_solution(
+ coin_state.puzzle_reveal.to_program(),
+ coin_state.solution.to_program(),
+ DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
+ )
+
+ cc_cond = [cond for cond in conditions[ConditionOpcode.CREATE_COIN] if int_from_bytes(cond.vars[1]) % 2 == 1][0]
+
+ coin = Coin(coin_state.coin.name(), cc_cond.vars[0], int_from_bytes(cc_cond.vars[1]))
+ inner_puz = get_inner_puzzle_from_singleton(coin_state.puzzle_reveal)
+ if inner_puz is None: # pragma: no cover
+ raise RuntimeError("Could not get inner puzzle from puzzle reveal in coin spend %s", coin_state)
+
+ lineage_bytes = [x.as_atom() for x in coin_state.solution.to_program().first().as_iter()]
+ if len(lineage_bytes) == 2:
+ lineage_proof = LineageProof(lineage_bytes[0], None, int_from_bytes(lineage_bytes[1]))
+ else:
+ lineage_proof = LineageProof(lineage_bytes[0], lineage_bytes[1], int_from_bytes(lineage_bytes[2]))
+ # Create and save the new singleton record
+ new_record = SingletonRecord(
+ coin, singleton_id, wallet_id, coin_state, inner_puz.get_tree_hash(), pending, 0, lineage_proof, None
+ )
+ await self.save_singleton(new_record)
+ # check if coin is in DB and mark deleted if found
+ current_records = await self.get_records_by_coin_id(coin_state.coin.name())
+ if len(current_records) > 0:
+ await self.delete_singleton_by_coin_id(coin_state.coin.name(), block_height)
+ return
+
+ def _to_singleton_record(self, row: Row) -> SingletonRecord:
+ return SingletonRecord(
+ coin=Coin.from_json_dict(json.loads(row[1])),
+ singleton_id=bytes32.from_hexstr(row[2]),
+ wallet_id=uint32(row[3]),
+ parent_coinspend=CoinSpend.from_bytes(row[4]),
+ inner_puzzle_hash=bytes32.from_bytes(row[5]), # inner puz hash
+ pending=True if row[6] == 1 else False,
+ removed_height=uint32(row[7]),
+ lineage_proof=LineageProof.from_bytes(row[8]),
+ custom_data=row[9],
+ )
+
+ async def delete_singleton_by_singleton_id(self, singleton_id: bytes32, height: uint32) -> bool:
+ """Tries to mark a given singleton as deleted at specific height
+
+ This is due to how re-org works
+ Returns `True` if singleton was found and marked deleted or `False` if not."""
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute(
+ "UPDATE singletons SET removed_height=? WHERE singleton_id=?", (int(height), singleton_id.hex())
+ )
+ if cursor.rowcount > 0:
+ log.info("Deleted singleton with singleton id: %s", singleton_id.hex())
+ return True
+ log.warning("Couldn't find singleton with singleton id to delete: %s", singleton_id.hex())
+ return False
+
+ async def delete_singleton_by_coin_id(self, coin_id: bytes32, height: uint32) -> bool:
+ """Tries to mark a given singleton as deleted at specific height
+
+ This is due to how re-org works
+ Returns `True` if singleton was found and marked deleted or `False` if not."""
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute(
+ "UPDATE singletons SET removed_height=? WHERE coin_id=?", (int(height), coin_id.hex())
+ )
+ if cursor.rowcount > 0:
+ log.info("Deleted singleton with coin id: %s", coin_id.hex())
+ return True
+ log.warning("Couldn't find singleton with coin id to delete: %s", coin_id.hex())
+ return False
+
+ async def delete_wallet(self, wallet_id: uint32) -> None:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute("DELETE FROM singletons WHERE wallet_id=?", (wallet_id,))
+ await cursor.close()
+
+ async def update_pending_transaction(self, coin_id: bytes32, pending: bool) -> bool:
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ c = await conn.execute(
+ "UPDATE singletons SET pending=? WHERE coin_id = ?",
+ (pending, coin_id.hex()),
+ )
+ return c.rowcount > 0
+
+ async def get_records_by_wallet_id(self, wallet_id: int) -> List[SingletonRecord]:
+ """
+ Retrieves all entries for a wallet ID.
+ """
+
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = await conn.execute_fetchall(
+ "SELECT * FROM singletons WHERE wallet_id = ? ORDER BY removed_height",
+ (wallet_id,),
+ )
+ return [self._to_singleton_record(row) for row in rows]
+
+ async def get_records_by_coin_id(self, coin_id: bytes32) -> List[SingletonRecord]:
+ """
+ Retrieves all entries for a coin ID.
+ """
+
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = await conn.execute_fetchall(
+ "SELECT * FROM singletons WHERE coin_id = ?",
+ (coin_id.hex(),),
+ )
+ return [self._to_singleton_record(row) for row in rows]
+
+ async def get_records_by_singleton_id(self, singleton_id: bytes32) -> List[SingletonRecord]:
+ """
+ Retrieves all entries for a singleton ID.
+ """
+
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ rows = await conn.execute_fetchall(
+ "SELECT * FROM singletons WHERE singleton_id = ? ORDER BY removed_height",
+ (singleton_id.hex(),),
+ )
+ return [self._to_singleton_record(row) for row in rows]
+
+ async def rollback(self, height: int, wallet_id_arg: int) -> None:
+ """
+        Rollback removes all entries for the given wallet which have a removed_height greater than the
+        height passed in. Rows marked as removed above that height are deleted so that the store reflects
+        the state at the rollback height.
+ """
+
+ async with self.db_wrapper.writer_maybe_transaction() as conn:
+ cursor = await conn.execute(
+ "DELETE FROM singletons WHERE removed_height>? AND wallet_id=?", (height, wallet_id_arg)
+ )
+ await cursor.close()
+
+ async def count(self, wallet_id: Optional[uint32] = None) -> int:
+ sql = "SELECT COUNT(singleton_id) FROM singletons WHERE removed_height=0"
+ params: List[uint32] = []
+ if wallet_id is not None:
+ sql += " AND wallet_id=?"
+ params.append(wallet_id)
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ count_row = await execute_fetchone(conn, sql, params)
+ if count_row:
+ return int(count_row[0])
+ return -1 # pragma: no cover
+
+ async def is_empty(self, wallet_id: Optional[uint32] = None) -> bool:
+ sql = "SELECT 1 FROM singletons WHERE removed_height=0"
+ params: List[Union[uint32, bytes32]] = []
+ if wallet_id is not None:
+ sql += " AND wallet_id=?"
+ params.append(wallet_id)
+ sql += " LIMIT 1"
+ async with self.db_wrapper.reader_no_transaction() as conn:
+ count_row = await execute_fetchone(conn, sql, params)
+ if count_row:
+ return False
+ return True
diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py
--- a/chia/wallet/wallet_state_manager.py
+++ b/chia/wallet/wallet_state_manager.py
@@ -7,7 +7,21 @@
import traceback
from contextlib import asynccontextmanager
from pathlib import Path
-from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, List, Optional, Set, Tuple, Type, TypeVar, Union
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
import aiosqlite
from blspy import G1Element, G2Element, PrivateKey
@@ -50,7 +64,17 @@
from chia.wallet.cat_wallet.cat_info import CATCoinData, CATInfo, CRCATInfo
from chia.wallet.cat_wallet.cat_utils import CAT_MOD, CAT_MOD_HASH, construct_cat_puzzle, match_cat_puzzle
from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.cat_wallet.dao_cat_wallet import DAOCATWallet
from chia.wallet.conditions import Condition, ConditionValidTimes, parse_timelock_info
+from chia.wallet.dao_wallet.dao_utils import (
+ get_p2_singleton_puzhash,
+ match_dao_cat_puzzle,
+ match_finished_puzzle,
+ match_funding_puzzle,
+ match_proposal_puzzle,
+ match_treasury_puzzle,
+)
+from chia.wallet.dao_wallet.dao_wallet import DAOWallet
from chia.wallet.db_wallet.db_wallet_puzzles import MIRROR_PUZZLE_HASH
from chia.wallet.derivation_record import DerivationRecord
from chia.wallet.derive_keys import (
@@ -80,7 +104,7 @@
puzzle_hash_for_synthetic_public_key,
)
from chia.wallet.sign_coin_spends import sign_coin_spends
-from chia.wallet.singleton import create_singleton_puzzle, get_inner_puzzle_from_singleton
+from chia.wallet.singleton import create_singleton_puzzle, get_inner_puzzle_from_singleton, get_singleton_id_from_puzzle
from chia.wallet.trade_manager import TradeManager
from chia.wallet.trading.trade_status import TradeStatus
from chia.wallet.transaction_record import TransactionRecord
@@ -114,6 +138,7 @@
from chia.wallet.wallet_protocol import WalletProtocol
from chia.wallet.wallet_puzzle_store import WalletPuzzleStore
from chia.wallet.wallet_retry_store import WalletRetryStore
+from chia.wallet.wallet_singleton_store import WalletSingletonStore
from chia.wallet.wallet_transaction_store import WalletTransactionStore
from chia.wallet.wallet_user_store import WalletUserStore
@@ -127,6 +152,8 @@
class WalletStateManager:
+ interested_ph_cache: Dict[bytes32, List[int]] = {}
+ interested_coin_cache: Dict[bytes32, List[int]] = {}
constants: ConsensusConstants
config: Dict[str, Any]
tx_store: WalletTransactionStore
@@ -165,6 +192,7 @@ class WalletStateManager:
wallet_node: WalletNode
pool_store: WalletPoolStore
dl_store: DataLayerStore
+ singleton_store: WalletSingletonStore
default_cats: Dict[str, Any]
asset_to_wallet_map: Dict[AssetType, Any]
initial_num_public_keys: int
@@ -181,6 +209,7 @@ async def create(
wallet_node: WalletNode,
) -> WalletStateManager:
self = WalletStateManager()
+
self.config = config
self.constants = constants
self.server = server
@@ -219,6 +248,7 @@ async def create(
self.dl_store = await DataLayerStore.create(self.db_wrapper)
self.interested_store = await WalletInterestedStore.create(self.db_wrapper)
self.retry_store = await WalletRetryStore.create(self.db_wrapper)
+ self.singleton_store = await WalletSingletonStore.create(self.db_wrapper)
self.default_cats = DEFAULT_CATS
self.wallet_node = wallet_node
@@ -273,8 +303,23 @@ async def create(
self.main_wallet,
wallet_info,
)
- elif wallet_type == WalletType.DATA_LAYER:
- wallet = await DataLayerWallet.create(self, wallet_info)
+ elif wallet_type == WalletType.DATA_LAYER: # pragma: no cover
+ wallet = await DataLayerWallet.create(
+ self,
+ wallet_info,
+ )
+ elif wallet_type == WalletType.DAO: # pragma: no cover
+ wallet = await DAOWallet.create(
+ self,
+ self.main_wallet,
+ wallet_info,
+ )
+ elif wallet_type == WalletType.DAO_CAT: # pragma: no cover
+ wallet = await DAOCATWallet.create(
+ self,
+ self.main_wallet,
+ wallet_info,
+ )
elif wallet_type == WalletType.VC: # pragma: no cover
wallet = await VCWallet.create(
self,
@@ -712,8 +757,42 @@ async def determine_coin_type(
coin_spend = await fetch_coin_spend_for_coin_state(parent_coin_state, peer)
puzzle = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
+ solution = Program.from_bytes(bytes(coin_spend.solution))
+
uncurried = uncurry_puzzle(puzzle)
+ dao_ids = []
+ wallets = self.wallets.values()
+ for wallet in wallets:
+ if wallet.type() == WalletType.DAO.value:
+ assert isinstance(wallet, DAOWallet)
+ dao_ids.append(wallet.dao_info.treasury_id)
+ funding_puzzle_check = match_funding_puzzle(uncurried, solution, coin_state.coin, dao_ids)
+ if funding_puzzle_check:
+ return await self.get_dao_wallet_from_coinspend_hint(coin_spend, coin_state), None
+
+ # Check if the coin is a DAO Treasury
+ dao_curried_args = match_treasury_puzzle(uncurried.mod, uncurried.args)
+ if dao_curried_args is not None:
+ return await self.handle_dao_treasury(dao_curried_args, parent_coin_state, coin_state, coin_spend), None
+ # Check if the coin is a Proposal and that it isn't the timer coin (amount == 0)
+ dao_curried_args = match_proposal_puzzle(uncurried.mod, uncurried.args)
+ if (dao_curried_args is not None) and (coin_state.coin.amount != 0):
+ return await self.handle_dao_proposal(dao_curried_args, parent_coin_state, coin_state, coin_spend), None
+
+ # Check if the coin is a finished proposal
+ dao_curried_args = match_finished_puzzle(uncurried.mod, uncurried.args)
+ if dao_curried_args is not None:
+ return (
+ await self.handle_dao_finished_proposals(dao_curried_args, parent_coin_state, coin_state, coin_spend),
+ None,
+ )
+
+ # Check if the coin is a DAO CAT
+ dao_cat_args = match_dao_cat_puzzle(uncurried)
+ if dao_cat_args:
+ return await self.handle_dao_cat(dao_cat_args, parent_coin_state, coin_state, coin_spend), None
+
# Check if the coin is a CAT
cat_curried_args = match_cat_puzzle(uncurried)
if cat_curried_args is not None:
@@ -868,6 +947,8 @@ async def spend_clawback_coins(
)
coin_spend: CoinSpend = generate_clawback_spend_bundle(coin, metadata, inner_puzzle, inner_solution)
coin_spends.append(coin_spend)
+            # Update incoming tx to prevent double spend and mark it as pending
+ await self.tx_store.increment_sent(incoming_tx.name, "", MempoolInclusionStatus.PENDING, None)
except Exception as e:
self.log.error(f"Failed to create clawback spend bundle for {coin.name().hex()}: {e}")
if len(coin_spends) == 0:
@@ -900,9 +981,6 @@ async def spend_clawback_coins(
valid_times=parse_timelock_info(extra_conditions),
)
await self.add_pending_transaction(tx_record)
- # Update incoming tx to prevent double spend and mark it is pending
- for coin_spend in coin_spends:
- await self.tx_store.increment_sent(coin_spend.coin.name(), "", MempoolInclusionStatus.PENDING, None)
return [tx_record.name]
async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
@@ -939,6 +1017,25 @@ async def is_standard_wallet_tx(self, coin_state: CoinState) -> bool:
wallet_identifier = await self.get_wallet_identifier_for_puzzle_hash(coin_state.coin.puzzle_hash)
return wallet_identifier is not None and wallet_identifier.type == WalletType.STANDARD_WALLET
+ async def handle_dao_cat(
+ self,
+ curried_args: Iterator[Program],
+ parent_coin_state: CoinState,
+ coin_state: CoinState,
+ coin_spend: CoinSpend,
+ ) -> Optional[WalletIdentifier]:
+ """
+ Handle the new coin when it is a DAO CAT
+ """
+ mod_hash, tail_hash, inner_puzzle = curried_args
+ asset_id: bytes32 = bytes32(bytes(tail_hash)[1:])
+ for wallet in self.wallets.values():
+ if wallet.type() == WalletType.DAO_CAT:
+ assert isinstance(wallet, DAOCATWallet)
+ if wallet.dao_cat_info.limitations_program_hash == asset_id:
+ return WalletIdentifier.create(wallet)
+ return None # pragma: no cover
+
async def handle_cat(
self,
parent_data: CATCoinData,
@@ -1072,6 +1169,7 @@ async def handle_did(
if derivation_record is None:
self.log.info(f"Received state for the coin that doesn't belong to us {coin_state}")
# Check if it was owned by us
+ # If the puzzle inside is no longer recognised then delete the wallet associated
removed_wallet_ids = []
for wallet in self.wallets.values():
if not isinstance(wallet, DIDWallet):
@@ -1181,6 +1279,94 @@ async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> O
minter_did = bytes32(bytes(singleton_struct.rest().first())[1:])
return minter_did
+ async def handle_dao_treasury(
+ self,
+ uncurried_args: Iterator[Program],
+ parent_coin_state: CoinState,
+ coin_state: CoinState,
+ coin_spend: CoinSpend,
+ ) -> Optional[WalletIdentifier]:
+ self.log.info("Entering dao_treasury handling in WalletStateManager")
+ singleton_id = get_singleton_id_from_puzzle(coin_spend.puzzle_reveal)
+ for wallet in self.wallets.values():
+ if wallet.type() == WalletType.DAO:
+ assert isinstance(wallet, DAOWallet)
+ if wallet.dao_info.treasury_id == singleton_id:
+ return WalletIdentifier.create(wallet)
+
+ # TODO: If we can't find the wallet for this DAO but we've got here because we're subscribed,
+ # then create the wallet. (see early in dao-wallet commits for how to do this)
+ return None # pragma: no cover
+
+ async def handle_dao_proposal(
+ self,
+ uncurried_args: Iterator[Program],
+ parent_coin_state: CoinState,
+ coin_state: CoinState,
+ coin_spend: CoinSpend,
+ ) -> Optional[WalletIdentifier]:
+ (
+ # ; second hash
+ SELF_HASH,
+ PROPOSAL_ID,
+ PROPOSED_PUZ_HASH,
+ YES_VOTES,
+ TOTAL_VOTES,
+ # ; first hash
+ PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_PUZHASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_MOD_HASH,
+ TREASURY_MOD_HASH,
+ LOCKUP_SELF_HASH,
+ CAT_TAIL_HASH,
+ TREASURY_ID,
+ ) = uncurried_args
+ for wallet in self.wallets.values():
+ if wallet.type() == WalletType.DAO:
+ assert isinstance(wallet, DAOWallet)
+ if wallet.dao_info.treasury_id == TREASURY_ID.as_atom():
+ assert isinstance(coin_state.created_height, int)
+ await wallet.add_or_update_proposal_info(coin_spend, uint32(coin_state.created_height))
+ return WalletIdentifier.create(wallet)
+ return None # pragma: no cover
+
+ async def handle_dao_finished_proposals(
+ self,
+ uncurried_args: Iterator[Program],
+ parent_coin_state: CoinState,
+ coin_state: CoinState,
+ coin_spend: CoinSpend,
+ ) -> Optional[WalletIdentifier]:
+ if coin_state.created_height is None: # pragma: no cover
+ raise ValueError("coin_state argument to handle_dao_finished_proposals cannot have created_height of None")
+ (
+ SINGLETON_STRUCT, # (SINGLETON_MOD_HASH, (SINGLETON_ID, LAUNCHER_PUZZLE_HASH))
+ FINISHED_STATE_MOD_HASH,
+ ) = uncurried_args
+ proposal_id = SINGLETON_STRUCT.rest().first().as_atom()
+ for wallet in self.wallets.values():
+ if wallet.type() == WalletType.DAO:
+ assert isinstance(wallet, DAOWallet)
+ for proposal_info in wallet.dao_info.proposals_list:
+ if proposal_info.proposal_id == proposal_id:
+ await wallet.add_or_update_proposal_info(coin_spend, uint32(coin_state.created_height))
+ return WalletIdentifier.create(wallet)
+ return None
+
+ async def get_dao_wallet_from_coinspend_hint(
+ self, coin_spend: CoinSpend, coin_state: CoinState
+ ) -> Optional[WalletIdentifier]:
+ hinted_coin = compute_spend_hints_and_additions(coin_spend)[coin_state.coin.name()]
+ if hinted_coin:
+ for wallet in self.wallets.values():
+ if wallet.type() == WalletType.DAO.value:
+ assert isinstance(wallet, DAOWallet)
+ if get_p2_singleton_puzhash(wallet.dao_info.treasury_id) == hinted_coin.hint:
+ return WalletIdentifier.create(wallet)
+ return None
+
async def handle_nft(
self,
nft_data: NFTCoinData,
@@ -1658,6 +1844,7 @@ async def _add_coin_states(
if record.coin_type == CoinType.CLAWBACK:
await self.interested_store.remove_interested_coin_id(coin_state.coin.name())
confirmed_tx_records: List[TransactionRecord] = []
+
for tx_record in all_unconfirmed:
if tx_record.type in CLAWBACK_INCOMING_TRANSACTION_TYPES:
for add_coin in tx_record.additions:
@@ -1678,14 +1865,17 @@ async def _add_coin_states(
unconfirmed_record.name, uint32(coin_state.spent_height)
)
- if record.wallet_type == WalletType.POOLING_WALLET:
+ if record.wallet_type in [WalletType.POOLING_WALLET, WalletType.DAO]:
+ wallet_type_to_class = {WalletType.POOLING_WALLET: PoolWallet, WalletType.DAO: DAOWallet}
if coin_state.spent_height is not None and coin_state.coin.amount == uint64(1):
- pool_wallet = self.get_wallet(id=uint32(record.wallet_id), required_type=PoolWallet)
+ singleton_wallet: Union[PoolWallet, DAOWallet] = self.get_wallet(
+ id=uint32(record.wallet_id), required_type=wallet_type_to_class[record.wallet_type]
+ )
curr_coin_state: CoinState = coin_state
while curr_coin_state.spent_height is not None:
- cs = await fetch_coin_spend_for_coin_state(curr_coin_state, peer)
- success = await pool_wallet.apply_state_transition(
+ cs: CoinSpend = await fetch_coin_spend_for_coin_state(curr_coin_state, peer)
+ success = await singleton_wallet.apply_state_transition(
cs, uint32(curr_coin_state.spent_height)
)
if not success:
@@ -1977,10 +2167,14 @@ async def coin_added(
coin_record: WalletCoinRecord = WalletCoinRecord(
coin, height, uint32(0), False, coinbase, wallet_type, wallet_id
)
+
await self.coin_store.add_coin_record(coin_record, coin_name)
await self.wallets[wallet_id].coin_added(coin, height, peer, coin_data)
+ if wallet_type == WalletType.DAO:
+ return
+
await self.create_more_puzzle_hashes()
async def add_pending_transaction(self, tx_record: TransactionRecord) -> None:
@@ -2222,12 +2416,31 @@ async def new_peak(self, height: uint32) -> None:
self.tx_pending_changed()
async def add_interested_puzzle_hashes(self, puzzle_hashes: List[bytes32], wallet_ids: List[int]) -> None:
+ # TODO: It's unclear whether the intent is for each puzzle hash to store all the elements of
+ # wallet_ids. Only one wallet_id per puzzle hash is stored in the interested_store,
+ # but the coin_cache keeps all wallet_ids for each puzzle hash.
+ for puzzle_hash in puzzle_hashes:
+ if puzzle_hash in self.interested_coin_cache:
+ wallet_ids_to_add = list(
+ set([w for w in wallet_ids if w not in self.interested_coin_cache[puzzle_hash]])
+ )
+ self.interested_coin_cache[puzzle_hash].extend(wallet_ids_to_add)
+ else:
+ self.interested_coin_cache[puzzle_hash] = list(set(wallet_ids))
for puzzle_hash, wallet_id in zip(puzzle_hashes, wallet_ids):
await self.interested_store.add_interested_puzzle_hash(puzzle_hash, wallet_id)
if len(puzzle_hashes) > 0:
await self.wallet_node.new_peak_queue.subscribe_to_puzzle_hashes(puzzle_hashes)
- async def add_interested_coin_ids(self, coin_ids: List[bytes32]) -> None:
+ async def add_interested_coin_ids(self, coin_ids: List[bytes32], wallet_ids: List[int] = []) -> None:
+ # TODO: FIX: wallet_ids is sometimes populated unexpectedly when called from add_pending_transaction
+ for coin_id in coin_ids:
+ if coin_id in self.interested_coin_cache:
+ # prevent repeated wallet_ids from appearing in the coin cache
+ wallet_ids_to_add = list(set([w for w in wallet_ids if w not in self.interested_coin_cache[coin_id]]))
+ self.interested_coin_cache[coin_id].extend(wallet_ids_to_add)
+ else:
+ self.interested_coin_cache[coin_id] = list(set(wallet_ids))
for coin_id in coin_ids:
await self.interested_store.add_interested_coin_id(coin_id)
if len(coin_ids) > 0:
| diff --git a/tests/cmds/wallet/test_dao.py b/tests/cmds/wallet/test_dao.py
new file mode 100644
--- /dev/null
+++ b/tests/cmds/wallet/test_dao.py
@@ -0,0 +1,531 @@
+from __future__ import annotations
+
+import time
+from pathlib import Path
+from secrets import token_bytes
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+import pytest
+
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.bech32m import encode_puzzle_hash
+from chia.util.ints import uint8, uint32, uint64
+from chia.wallet.conditions import parse_timelock_info
+from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.transaction_type import TransactionType
+from chia.wallet.util.tx_config import TXConfig
+from chia.wallet.util.wallet_types import WalletType
+from tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, run_cli_command_and_assert
+from tests.cmds.wallet.test_consts import FINGERPRINT_ARG
+
+# DAO Commands
+
+
+def test_dao_create(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None:
+ test_rpc_clients, root_dir = get_test_cli_clients
+
+ # set RPC Client
+ class DAOCreateRpcClient(TestWalletRpcClient):
+ async def create_new_dao_wallet(
+ self,
+ mode: str,
+ tx_config: TXConfig,
+ dao_rules: Optional[Dict[str, uint64]] = None,
+ amount_of_cats: Optional[uint64] = None,
+ treasury_id: Optional[bytes32] = None,
+ filter_amount: uint64 = uint64(1),
+ name: Optional[str] = None,
+ fee: uint64 = uint64(0),
+ fee_for_cat: uint64 = uint64(0),
+ ) -> Dict[str, Union[str, int, bytes32]]:
+ if not treasury_id:
+ treasury_id = bytes32(token_bytes(32))
+ return {
+ "success": True,
+ "type": "DAO",
+ "wallet_id": 2,
+ "treasury_id": treasury_id,
+ "cat_wallet_id": 3,
+ "dao_cat_wallet_id": 4,
+ }
+
+ inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter
+ test_rpc_clients.wallet_rpc_client = inst_rpc_client
+ command_args = [
+ "dao",
+ "create",
+ FINGERPRINT_ARG,
+ "-n test",
+ "--attendance-required",
+ "1000",
+ "--cat-amount",
+ "100000",
+ "-m0.1",
+ "--reuse",
+ ]
+ # these are various things that should be in the output
+ assert_list = ["Successfully created DAO Wallet", "DAO Wallet ID: 2", "CAT Wallet ID: 3", "DAOCAT Wallet ID: 4"]
+ run_cli_command_and_assert(capsys, root_dir, command_args, assert_list)
+
+ # Check that the command warns and adds 1 mojo if the proposal minimum is even
+ odd_pm_command_args = [
+ "dao",
+ "create",
+ FINGERPRINT_ARG,
+ "-n test",
+ "--attendance-required",
+ "1000",
+ "--cat-amount",
+ "100000",
+ "--proposal-minimum",
+ "10",
+ "-m0.1",
+ "--reuse",
+ ]
+ extra_assert_list = [
+ "Adding 1 mojo to proposal minimum amount",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, odd_pm_command_args, extra_assert_list)
+
+ # Add wallet for existing DAO
+ add_command_args = [
+ "dao",
+ "add",
+ FINGERPRINT_ARG,
+ "-n test",
+ "-t",
+ bytes32(token_bytes(32)).hex(),
+ "--filter-amount",
+ "1",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, add_command_args, assert_list)
+
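The second invocation above expects the CLI to report "Adding 1 mojo to proposal minimum amount" when an even minimum is supplied. A minimal sketch of that parity adjustment follows, written as a hypothetical helper; the real CLI code is not shown in this patch.

    # Hypothetical sketch of the parity adjustment implied by the assertion above.
    def ensure_odd_proposal_minimum(proposal_minimum: int) -> int:
        if proposal_minimum % 2 == 0:
            print("Adding 1 mojo to proposal minimum amount")
            return proposal_minimum + 1
        return proposal_minimum

    assert ensure_odd_proposal_minimum(10) == 11  # the '--proposal-minimum 10' case above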
+
+def test_dao_treasury(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None:
+ test_rpc_clients, root_dir = get_test_cli_clients
+
+ class DAOCreateRpcClient(TestWalletRpcClient):
+ async def dao_get_treasury_id(
+ self,
+ wallet_id: int,
+ ) -> Dict[str, str]:
+ return {"treasury_id": "0xCAFEF00D"}
+
+ async def dao_get_treasury_balance(self, wallet_id: int) -> Dict[str, Union[str, bool, Dict[str, int]]]:
+ if wallet_id == 2:
+ return {"success": True, "balances": {"xch": 1000000000000, "0xCAFEF00D": 10000000}}
+ else:
+ return {"success": True, "balances": {}}
+
+ async def dao_add_funds_to_treasury(
+ self,
+ wallet_id: int,
+ funding_wallet_id: int,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, bool]]:
+ return {"success": True, "tx_id": bytes32(b"1" * 32).hex()}
+
+ async def dao_get_rules(
+ self,
+ wallet_id: int,
+ ) -> Dict[str, Dict[str, int]]:
+ return {"rules": {"proposal_minimum": 100}}
+
+ async def get_transaction(self, wallet_id: int, transaction_id: bytes32) -> TransactionRecord:
+ return TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=bytes32(b"2" * 32),
+ amount=uint64(10),
+ fee_amount=uint64(1),
+ confirmed=True,
+ sent=uint32(10),
+ spend_bundle=None,
+ additions=[],
+ removals=[],
+ wallet_id=uint32(1),
+ sent_to=[("peer1", uint8(1), None)],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(token_bytes()),
+ memos=[],
+ valid_times=parse_timelock_info(tuple()),
+ )
+
+ inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter
+ test_rpc_clients.wallet_rpc_client = inst_rpc_client
+
+ get_id_args = ["dao", "get_id", FINGERPRINT_ARG, "-i 2"]
+ get_id_asserts = ["Treasury ID: 0xCAFEF00D"]
+ run_cli_command_and_assert(capsys, root_dir, get_id_args, get_id_asserts)
+
+ get_balance_args = ["dao", "balance", FINGERPRINT_ARG, "-i 2"]
+ get_balance_asserts = ["XCH: 1.0", "0xCAFEF00D: 10000.0"]
+ run_cli_command_and_assert(capsys, root_dir, get_balance_args, get_balance_asserts)
+
+ no_balance_args = ["dao", "balance", FINGERPRINT_ARG, "-i 3"]
+ no_balance_asserts = ["The DAO treasury currently has no funds"]
+ run_cli_command_and_assert(capsys, root_dir, no_balance_args, no_balance_asserts)
+
+ add_funds_args = ["dao", "add_funds", FINGERPRINT_ARG, "-i 2", "-w 1", "-a", "10", "-m 0.1", "--reuse"]
+ add_funds_asserts = [
+ "Transaction submitted to nodes",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, add_funds_args, add_funds_asserts)
+
+ rules_args = ["dao", "rules", FINGERPRINT_ARG, "-i 2"]
+ rules_asserts = "proposal_minimum: 100"
+ run_cli_command_and_assert(capsys, root_dir, rules_args, rules_asserts)
+
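For reference, the balance assertions above follow from the stubbed mojo amounts, assuming the usual display units (1 XCH = 10**12 mojos, CAT amounts shown with three decimal places).

    # Worked arithmetic behind the expected output strings (assumed display units).
    xch_balance = 1000000000000 / 10**12   # -> 1.0, printed as "XCH: 1.0"
    cat_balance = 10000000 / 1000          # -> 10000.0, printed as "0xCAFEF00D: 10000.0"
    assert xch_balance == 1.0 and cat_balance == 10000.0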
+
+def test_dao_proposals(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None:
+ test_rpc_clients, root_dir = get_test_cli_clients
+
+ # set RPC Client
+ class DAOCreateRpcClient(TestWalletRpcClient):
+ async def dao_get_proposals(
+ self,
+ wallet_id: int,
+ include_closed: bool = True,
+ ) -> Dict[str, Union[bool, int, List[Any]]]:
+ proposal = {
+ "proposal_id": "0xCAFEF00D",
+ "amount_voted": uint64(10),
+ "yes_votes": uint64(10),
+ "passed": True,
+ "closed": True,
+ }
+ proposal_2 = {
+ "proposal_id": "0xFEEDBEEF",
+ "amount_voted": uint64(120),
+ "yes_votes": uint64(100),
+ "passed": True,
+ "closed": False,
+ }
+ return {
+ "success": True,
+ "proposals": [proposal, proposal_2],
+ "proposal_timelock": 5,
+ "soft_close_length": 10,
+ }
+
+ async def dao_parse_proposal(
+ self,
+ wallet_id: int,
+ proposal_id: str,
+ ) -> Dict[str, Union[bool, Dict[str, Any]]]:
+ if proposal_id == "0xCAFEF00D":
+ puzhash = bytes32(b"1" * 32).hex()
+ asset_id = bytes32(b"2" * 32).hex()
+ proposal_details: Dict[str, Any] = {
+ "proposal_type": "s",
+ "xch_conditions": [{"puzzle_hash": puzhash, "amount": 100}],
+ "asset_conditions": [
+ {"asset_id": asset_id, "conditions": [{"puzzle_hash": puzhash, "amount": 123}]}
+ ],
+ }
+ elif proposal_id == "0xFEEDBEEF":
+ proposal_details = {
+ "proposal_type": "u",
+ "dao_rules": {
+ "proposal_timelock": 10,
+ "soft_close_length": 50,
+ },
+ }
+ else:
+ proposal_details = {
+ "proposal_type": "s",
+ "mint_amount": 1000,
+ "new_cat_puzhash": bytes32(b"x" * 32).hex(),
+ }
+ proposal_state = {
+ "state": {
+ "passed": False,
+ "closable": False,
+ "closed": False,
+ "total_votes_needed": 10,
+ "yes_votes_needed": 20,
+ "blocks_needed": 30,
+ }
+ }
+ proposal_dict = {**proposal_state, **proposal_details}
+ return {"success": True, "proposal_dictionary": proposal_dict}
+
+ async def dao_vote_on_proposal(
+ self,
+ wallet_id: int,
+ proposal_id: str,
+ vote_amount: int,
+ tx_config: TXConfig,
+ is_yes_vote: bool,
+ fee: uint64 = uint64(0),
+ ) -> Dict[str, Union[str, bool]]:
+ return {"success": True, "tx_id": bytes32(b"1" * 32).hex()}
+
+ async def dao_close_proposal(
+ self,
+ wallet_id: int,
+ proposal_id: str,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ self_destruct: bool = False,
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, bool]]:
+ return {"success": True, "tx_id": bytes32(b"1" * 32).hex()}
+
+ async def dao_create_proposal(
+ self,
+ wallet_id: int,
+ proposal_type: str,
+ tx_config: TXConfig,
+ additions: Optional[List[Dict[str, Any]]] = None,
+ amount: Optional[uint64] = None,
+ inner_address: Optional[str] = None,
+ asset_id: Optional[str] = None,
+ cat_target_address: Optional[str] = None,
+ vote_amount: Optional[int] = None,
+ new_dao_rules: Optional[Dict[str, uint64]] = None,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, bool]]:
+ return {"success": True, "proposal_id": "0xCAFEF00D"}
+
+ async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, int]]]:
+ return [{"id": 1, "type": 0}, {"id": 2, "type": 14}]
+
+ async def get_transaction(self, wallet_id: int, transaction_id: bytes32) -> TransactionRecord:
+ return TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=bytes32(b"2" * 32),
+ amount=uint64(10),
+ fee_amount=uint64(1),
+ confirmed=True,
+ sent=uint32(10),
+ spend_bundle=None,
+ additions=[],
+ removals=[],
+ wallet_id=uint32(1),
+ sent_to=[("peer1", uint8(1), None)],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(b"x" * 32),
+ memos=[],
+ valid_times=parse_timelock_info(tuple()),
+ )
+
+ # List all proposals
+ inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter
+ test_rpc_clients.wallet_rpc_client = inst_rpc_client
+ list_args = ["dao", "list_proposals", FINGERPRINT_ARG, "-i 2"]
+ # these are various things that should be in the output
+ list_asserts = [
+ "Proposal ID: 0xCAFEF00D",
+ "Status: OPEN",
+ "Votes for: 10",
+ "Votes against: 0",
+ "Proposal ID: 0xFEEDBEEF",
+ "Status: CLOSED",
+ "Votes for: 100",
+ "Votes against: 20",
+ "Proposals have 10 blocks of soft close time.",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, list_args, list_asserts)
+
+ # Show details of specific proposal
+ parse_spend_args = ["dao", "show_proposal", FINGERPRINT_ARG, "-i 2", "-p", "0xCAFEF00D"]
+ address = encode_puzzle_hash(bytes32(b"1" * 32), "xch")
+ asset_id = bytes32(b"2" * 32).hex()
+ parse_spend_asserts = [
+ "Type: SPEND",
+ "Status: OPEN",
+ "Passed: False",
+ "Closable: False",
+ "Total votes needed: 10",
+ "Yes votes needed: 20",
+ "Blocks remaining: 30",
+ "Proposal XCH Conditions",
+ f"Address: {address}",
+ "Amount: 100",
+ "Proposal asset Conditions",
+ f"Asset ID: {asset_id}",
+ f"Address: {address}",
+ "Amount: 123",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, parse_spend_args, parse_spend_asserts)
+
+ parse_update_args = ["dao", "show_proposal", FINGERPRINT_ARG, "-i2", "-p", "0xFEEDBEEF"]
+ parse_update_asserts = [
+ "Type: UPDATE",
+ "proposal_timelock: 10",
+ "soft_close_length: 50",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, parse_update_args, parse_update_asserts)
+
+ parse_mint_args = ["dao", "show_proposal", FINGERPRINT_ARG, "-i2", "-p", "0xDABBAD00"]
+ parse_mint_asserts = [
+ "Type: MINT",
+ "Amount of CAT to mint: 1000",
+ "Address: {}".format(encode_puzzle_hash(bytes32(b"x" * 32), "xch")),
+ ]
+ run_cli_command_and_assert(capsys, root_dir, parse_mint_args, parse_mint_asserts)
+
+ # Vote on a proposal
+ vote_args = ["dao", "vote", FINGERPRINT_ARG, "-i 2", "-p", "0xFEEDBEEF", "-a", "1000", "-n", "-m 0.1", "--reuse"]
+ vote_asserts = ["Transaction submitted to nodes"]
+ run_cli_command_and_assert(capsys, root_dir, vote_args, vote_asserts)
+
+ # Close a proposal
+ close_args = ["dao", "close_proposal", FINGERPRINT_ARG, "-i 2", "-p", "0xFEEDBEEF", "-d", "-m 0.1", "--reuse"]
+ close_asserts = ["Transaction submitted to nodes"]
+ run_cli_command_and_assert(capsys, root_dir, close_args, close_asserts)
+
+ # Create a spend proposal
+ address = encode_puzzle_hash(bytes32(b"x" * 32), "xch")
+ spend_args = [
+ "dao",
+ "create_proposal",
+ "spend",
+ FINGERPRINT_ARG,
+ "-i 2",
+ "-t",
+ address,
+ "-a",
+ "10",
+ "-v",
+ "1000",
+ "--asset-id",
+ "0xFEEDBEEF",
+ "-m 0.1",
+ "--reuse",
+ ]
+ proposal_asserts = ["Successfully created proposal", "Proposal ID: 0xCAFEF00D"]
+ run_cli_command_and_assert(capsys, root_dir, spend_args, proposal_asserts)
+
+ bad_spend_args = [
+ "dao",
+ "create_proposal",
+ "spend",
+ FINGERPRINT_ARG,
+ "-i 2",
+ "-t",
+ address,
+ "-v",
+ "1000",
+ "--asset-id",
+ "0xFEEDBEEF",
+ "-m 0.1",
+ "--reuse",
+ ]
+ proposal_asserts = ["Successfully created proposal", "Proposal ID: 0xCAFEF00D"]
+ with pytest.raises(ValueError) as e_info:
+ run_cli_command_and_assert(capsys, root_dir, bad_spend_args, proposal_asserts)
+ assert e_info.value.args[0] == "Must include a json specification or an address / amount pair."
+
+ # Create an update proposal
+ update_args = [
+ "dao",
+ "create_proposal",
+ "update",
+ FINGERPRINT_ARG,
+ "-i 2",
+ "-v",
+ "1000",
+ "--proposal-timelock",
+ "4",
+ "-m 0.1",
+ "--reuse",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, update_args, proposal_asserts)
+
+ # Create a mint proposal
+ mint_args = [
+ "dao",
+ "create_proposal",
+ "mint",
+ FINGERPRINT_ARG,
+ "-i 2",
+ "-v",
+ "1000",
+ "-a",
+ "100",
+ "-t",
+ address,
+ "-m 0.1",
+ "--reuse",
+ ]
+ run_cli_command_and_assert(capsys, root_dir, mint_args, proposal_asserts)
+
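The "Votes against" strings asserted earlier in this test appear to be derived as amount_voted minus yes_votes from the stubbed proposals; a quick arithmetic check of that assumption.

    # Worked arithmetic for the stubbed proposals (assumed: against = amount_voted - yes_votes).
    assert 10 - 10 == 0      # proposal 0xCAFEF00D -> "Votes against: 0"
    assert 120 - 100 == 20   # proposal 0xFEEDBEEF -> "Votes against: 20"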
+
+def test_dao_cats(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None:
+ test_rpc_clients, root_dir = get_test_cli_clients
+
+ # set RPC Client
+ class DAOCreateRpcClient(TestWalletRpcClient):
+ async def dao_send_to_lockup(
+ self,
+ wallet_id: int,
+ amount: uint64,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, int]]:
+ return {"success": True, "tx_id": bytes32(b"x" * 32).hex()}
+
+ async def dao_free_coins_from_finished_proposals(
+ self,
+ wallet_id: int,
+ tx_config: TXConfig,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, int]]:
+ return {"success": True, "tx_id": bytes32(b"x" * 32).hex()}
+
+ async def dao_exit_lockup(
+ self,
+ wallet_id: int,
+ tx_config: TXConfig,
+ coins: Optional[List[Dict[str, Union[str, int]]]] = None,
+ fee: uint64 = uint64(0),
+ reuse_puzhash: Optional[bool] = None,
+ ) -> Dict[str, Union[str, int]]:
+ return {"success": True, "tx_id": bytes32(b"x" * 32).hex()}
+
+ async def get_transaction(self, wallet_id: int, transaction_id: bytes32) -> TransactionRecord:
+ return TransactionRecord(
+ confirmed_at_height=uint32(0),
+ created_at_time=uint64(int(time.time())),
+ to_puzzle_hash=bytes32(b"2" * 32),
+ amount=uint64(10),
+ fee_amount=uint64(1),
+ confirmed=True,
+ sent=uint32(10),
+ spend_bundle=None,
+ additions=[],
+ removals=[],
+ wallet_id=uint32(1),
+ sent_to=[("peer1", uint8(1), None)],
+ trade_id=None,
+ type=uint32(TransactionType.INCOMING_TX.value),
+ name=bytes32(b"x" * 32),
+ memos=[],
+ valid_times=parse_timelock_info(tuple()),
+ )
+
+ inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter
+ test_rpc_clients.wallet_rpc_client = inst_rpc_client
+ lockup_args = ["dao", "lockup_coins", FINGERPRINT_ARG, "-i 2", "-a", "1000", "-m 0.1", "--reuse"]
+ lockup_asserts = ["Transaction submitted to nodes"]
+ run_cli_command_and_assert(capsys, root_dir, lockup_args, lockup_asserts)
+
+ release_args = ["dao", "release_coins", FINGERPRINT_ARG, "-i 2", "-m 0.1", "--reuse"]
+ # tx_id = bytes32(b"x" * 32).hex()
+ release_asserts = ["Transaction submitted to nodes"]
+ run_cli_command_and_assert(capsys, root_dir, release_args, release_asserts)
+
+ exit_args = ["dao", "exit_lockup", FINGERPRINT_ARG, "-i 2", "-m 0.1", "--reuse"]
+ exit_asserts = ["Transaction submitted to nodes"]
+ run_cli_command_and_assert(capsys, root_dir, exit_args, exit_asserts)
diff --git a/tests/pools/test_pool_puzzles_lifecycle.py b/tests/pools/test_pool_puzzles_lifecycle.py
--- a/tests/pools/test_pool_puzzles_lifecycle.py
+++ b/tests/pools/test_pool_puzzles_lifecycle.py
@@ -16,7 +16,6 @@
create_travel_spend,
create_waiting_room_inner_puzzle,
get_delayed_puz_info_from_launcher_spend,
- get_most_recent_singleton_coin_from_coin_spend,
get_pubkey_from_member_inner_puzzle,
get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle,
is_pool_singleton_inner_puzzle,
@@ -39,6 +38,7 @@
puzzle_for_pk,
solution_for_conditions,
)
+from chia.wallet.singleton import get_most_recent_singleton_coin_from_coin_spend
from tests.clvm.coin_store import BadSpendBundleError, CoinStore, CoinTimestamp
from tests.clvm.test_puzzles import public_key_for_index, secret_exponent_for_index
from tests.util.key_tool import KeyTool
diff --git a/tests/wallet/dao_wallet/config.py b/tests/wallet/dao_wallet/config.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/dao_wallet/config.py
@@ -0,0 +1,3 @@
+from __future__ import annotations
+
+checkout_blocks_and_plots = True
diff --git a/tests/wallet/dao_wallet/test_dao_clvm.py b/tests/wallet/dao_wallet/test_dao_clvm.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/dao_wallet/test_dao_clvm.py
@@ -0,0 +1,1261 @@
+from __future__ import annotations
+
+from typing import Any, List, Optional, Tuple
+
+import pytest
+from blspy import AugSchemeMPL
+from clvm.casts import int_to_bytes
+
+from chia.clvm.spend_sim import SimClient, SpendSim, sim_and_client
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import INFINITE_COST, Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.types.condition_opcodes import ConditionOpcode
+from chia.types.mempool_inclusion_status import MempoolInclusionStatus
+from chia.types.spend_bundle import SpendBundle
+from chia.util.condition_tools import conditions_dict_for_solution
+from chia.util.errors import Err
+from chia.util.hash import std_hash
+from chia.util.ints import uint32, uint64
+from chia.wallet.cat_wallet.cat_utils import CAT_MOD
+from chia.wallet.dao_wallet.dao_info import DAORules
+from chia.wallet.dao_wallet.dao_utils import curry_singleton, get_p2_singleton_puzhash, get_treasury_puzzle
+from chia.wallet.puzzles.load_clvm import load_clvm
+from chia.wallet.singleton import create_singleton_puzzle_hash
+
+CAT_MOD_HASH: bytes32 = CAT_MOD.get_tree_hash()
+SINGLETON_MOD: Program = load_clvm("singleton_top_layer_v1_1.clsp")
+SINGLETON_MOD_HASH: bytes32 = SINGLETON_MOD.get_tree_hash()
+SINGLETON_LAUNCHER: Program = load_clvm("singleton_launcher.clsp")
+SINGLETON_LAUNCHER_HASH: bytes32 = SINGLETON_LAUNCHER.get_tree_hash()
+DAO_LOCKUP_MOD: Program = load_clvm("dao_lockup.clsp")
+DAO_LOCKUP_MOD_HASH: bytes32 = DAO_LOCKUP_MOD.get_tree_hash()
+DAO_PROPOSAL_TIMER_MOD: Program = load_clvm("dao_proposal_timer.clsp")
+DAO_PROPOSAL_TIMER_MOD_HASH: bytes32 = DAO_PROPOSAL_TIMER_MOD.get_tree_hash()
+DAO_PROPOSAL_MOD: Program = load_clvm("dao_proposal.clsp")
+DAO_PROPOSAL_MOD_HASH: bytes32 = DAO_PROPOSAL_MOD.get_tree_hash()
+DAO_PROPOSAL_VALIDATOR_MOD: Program = load_clvm("dao_proposal_validator.clsp")
+DAO_PROPOSAL_VALIDATOR_MOD_HASH: bytes32 = DAO_PROPOSAL_VALIDATOR_MOD.get_tree_hash()
+DAO_TREASURY_MOD: Program = load_clvm("dao_treasury.clsp")
+DAO_TREASURY_MOD_HASH: bytes32 = DAO_TREASURY_MOD.get_tree_hash()
+SPEND_P2_SINGLETON_MOD: Program = load_clvm("dao_spend_p2_singleton_v2.clsp")
+SPEND_P2_SINGLETON_MOD_HASH: bytes32 = SPEND_P2_SINGLETON_MOD.get_tree_hash()
+DAO_FINISHED_STATE: Program = load_clvm("dao_finished_state.clsp")
+DAO_FINISHED_STATE_HASH: bytes32 = DAO_FINISHED_STATE.get_tree_hash()
+DAO_CAT_TAIL: Program = load_clvm(
+ "genesis_by_coin_id_or_singleton.clsp", package_or_requirement="chia.wallet.cat_wallet.puzzles"
+)
+DAO_CAT_TAIL_HASH: bytes32 = DAO_CAT_TAIL.get_tree_hash()
+P2_SINGLETON_MOD: Program = load_clvm("p2_singleton_via_delegated_puzzle.clsp")
+P2_SINGLETON_MOD_HASH: bytes32 = P2_SINGLETON_MOD.get_tree_hash()
+P2_SINGLETON_AGGREGATOR_MOD: Program = load_clvm("p2_singleton_aggregator.clsp")
+P2_SINGLETON_AGGREGATOR_MOD_HASH: bytes32 = P2_SINGLETON_AGGREGATOR_MOD.get_tree_hash()
+DAO_UPDATE_MOD: Program = load_clvm("dao_update_proposal.clsp")
+DAO_UPDATE_MOD_HASH: bytes32 = DAO_UPDATE_MOD.get_tree_hash()
+
+
+def test_finished_state() -> None:
+ """
+ Once a proposal has closed, it becomes a 'beacon' singleton which announces its proposal ID.
+ This is referred to as the finished state, and it is used to confirm that a proposal has closed
+ in order to release voting CATs from the lockup puzzle.
+ """
+ proposal_id: Program = Program.to("proposal_id").get_tree_hash()
+ singleton_struct: Program = Program.to(
+ (SINGLETON_MOD.get_tree_hash(), (proposal_id, SINGLETON_LAUNCHER.get_tree_hash()))
+ )
+ finished_inner_puz = DAO_FINISHED_STATE.curry(singleton_struct, DAO_FINISHED_STATE_HASH)
+ finished_full_puz = SINGLETON_MOD.curry(singleton_struct, finished_inner_puz)
+ inner_sol = Program.to([1])
+
+ conds = finished_inner_puz.run(inner_sol).as_python()
+ assert conds[0][1] == finished_full_puz.get_tree_hash()
+ assert conds[2][1] == finished_inner_puz.get_tree_hash()
+
+ lineage = Program.to([proposal_id, finished_inner_puz.get_tree_hash(), 1])
+ full_sol = Program.to([lineage, 1, inner_sol])
+
+ conds = conditions_dict_for_solution(finished_full_puz, full_sol, INFINITE_COST)
+ assert conds[ConditionOpcode.ASSERT_MY_PUZZLEHASH][0].vars[0] == finished_full_puz.get_tree_hash()
+ assert conds[ConditionOpcode.CREATE_COIN][0].vars[0] == finished_full_puz.get_tree_hash()
+
+
+def test_proposal() -> None:
+ """
+ This test covers the three paths for closing a proposal:
+ - Close a passed proposal
+ - Close a failed proposal
+ - Self-destruct a broken proposal
+ """
+ proposal_pass_percentage: uint64 = uint64(5100)
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+ treasury_id: Program = Program.to("treasury").get_tree_hash()
+ singleton_id: Program = Program.to("singleton_id").get_tree_hash()
+ singleton_struct: Program = Program.to(
+ (SINGLETON_MOD.get_tree_hash(), (singleton_id, SINGLETON_LAUNCHER.get_tree_hash()))
+ )
+ self_destruct_time = 1000 # number of blocks
+ oracle_spend_delay = 10
+ active_votes_list = [0xFADEDDAB]  # the ids of previously voted-on proposals
+ acs: Program = Program.to(1)
+ acs_ph: bytes32 = acs.get_tree_hash()
+
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ proposal_curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ dao_lockup_self.get_tree_hash(),
+ CAT_TAIL_HASH,
+ treasury_id,
+ )
+
+ # make a lockup puz for the dao cat
+ lockup_puz = dao_lockup_self.curry(
+ dao_lockup_self.get_tree_hash(),
+ active_votes_list,
+ acs, # innerpuz
+ )
+
+ dao_cat_puz: Program = CAT_MOD.curry(CAT_MOD_HASH, CAT_TAIL_HASH, lockup_puz)
+ dao_cat_puzhash: bytes32 = dao_cat_puz.get_tree_hash()
+
+ # Test Voting
+ current_yes_votes = 20
+ current_total_votes = 100
+ full_proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ acs_ph,
+ current_yes_votes,
+ current_total_votes,
+ )
+
+ vote_amount = 10
+ vote_type = 1 # yes vote
+ vote_coin_id = Program.to("vote_coin").get_tree_hash()
+ solution: Program = Program.to(
+ [
+ [vote_amount], # vote amounts
+ vote_type, # vote type (yes)
+ [vote_coin_id], # vote coin ids
+ [active_votes_list], # previous votes
+ [acs_ph], # lockup inner puz hash
+ 0, # inner puz reveal
+ 0, # soft close len
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+
+ # Run the proposal and check its conditions
+ conditions = conditions_dict_for_solution(full_proposal, solution, INFINITE_COST)
+
+ # Puzzle Announcement of vote_coin_ids
+ assert bytes32(conditions[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT][0].vars[0]) == vote_coin_id
+
+ # Assert puzzle announcement from dao_cat of proposal_id and all vote details
+ apa_msg = Program.to([singleton_id, vote_amount, vote_type, vote_coin_id]).get_tree_hash()
+ assert conditions[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == std_hash(dao_cat_puzhash + apa_msg)
+
+ # Check that the proposal recreates itself with updated vote amounts
+ next_proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ acs_ph,
+ current_yes_votes + vote_amount,
+ current_total_votes + vote_amount,
+ )
+ assert bytes32(conditions[ConditionOpcode.CREATE_COIN][0].vars[0]) == next_proposal.get_tree_hash()
+ assert conditions[ConditionOpcode.CREATE_COIN][0].vars[1] == int_to_bytes(1)
+
+ # Try to vote using multiple coin ids
+ vote_coin_id_1 = Program.to("vote_coin_1").get_tree_hash()
+ vote_coin_id_2 = Program.to("vote_coin_2").get_tree_hash()
+ repeat_solution_1: Program = Program.to(
+ [
+ [vote_amount, 20], # vote amounts
+ vote_type, # vote type (yes)
+ [vote_coin_id_1, vote_coin_id_2], # vote coin ids
+ [active_votes_list, 0], # previous votes
+ [acs_ph, acs_ph], # lockup inner puz hash
+ 0, # inner puz reveal
+ 0, # soft close len
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+
+ conds_repeated = conditions_dict_for_solution(full_proposal, repeat_solution_1, INFINITE_COST)
+ assert len(conds_repeated) == 4
+
+ # Try to vote using repeated coin ids
+ repeat_solution_2: Program = Program.to(
+ [
+ [vote_amount, vote_amount, 20], # vote amounts
+ vote_type, # vote type (yes)
+ [vote_coin_id_1, vote_coin_id_1, vote_coin_id_2], # vote coin ids
+ [active_votes_list], # previous votes
+ [acs_ph], # lockup inner puz hash
+ 0, # inner puz reveal
+ 0, # soft close len
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+
+ with pytest.raises(ValueError) as e_info:
+ conditions_dict_for_solution(full_proposal, repeat_solution_2, INFINITE_COST)
+ assert e_info.value.args[0] == "clvm raise"
+
+ # Test Launch
+ current_yes_votes = 0
+ current_total_votes = 0
+ launch_proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ acs_ph,
+ current_yes_votes,
+ current_total_votes,
+ )
+ vote_amount = 10
+ vote_type = 1 # yes vote
+ vote_coin_id = Program.to("vote_coin").get_tree_hash()
+ solution = Program.to(
+ [
+ [vote_amount], # vote amounts
+ vote_type, # vote type (yes)
+ [vote_coin_id], # vote coin ids
+ # TODO: Check whether previous votes should be 0 in the first spend since
+ # proposal looks at (f previous_votes) during loop_over_vote_coins
+ [0], # previous votes
+ [acs_ph], # lockup inner puz hash
+ acs, # inner puz reveal
+ 0, # soft close len
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+ # Run the proposal and check its conditions
+ conditions = conditions_dict_for_solution(launch_proposal, solution, INFINITE_COST)
+ # check that the timer is created
+ timer_puz = DAO_PROPOSAL_TIMER_MOD.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_struct,
+ )
+ timer_puzhash = timer_puz.get_tree_hash()
+ assert conditions[ConditionOpcode.CREATE_COIN][1].vars[0] == timer_puzhash
+
+ # Test exits
+
+ # Test attempt to close a passing proposal
+ current_yes_votes = 200
+ current_total_votes = 350
+ full_proposal = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ acs_ph,
+ current_yes_votes,
+ current_total_votes,
+ )
+ attendance_required = 200
+ proposal_timelock = 20
+ soft_close_length = 5
+ solution = Program.to(
+ [
+ Program.to("validator_hash").get_tree_hash(),
+ 0,
+ # Program.to("receiver_hash").get_tree_hash(), # not needed anymore?
+ proposal_timelock,
+ proposal_pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+
+ conds = conditions_dict_for_solution(full_proposal, solution, INFINITE_COST)
+
+ # make a matching treasury puzzle for the APA
+ treasury_inner: Program = DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ Program.to("validator_hash"),
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ proposal_pass_percentage,
+ self_destruct_time,
+ oracle_spend_delay,
+ )
+ treasury: Program = SINGLETON_MOD.curry(
+ Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH))),
+ treasury_inner,
+ )
+ treasury_puzhash = treasury.get_tree_hash()
+ apa_msg = singleton_id
+
+ timer_apa = std_hash(timer_puzhash + singleton_id)
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == timer_apa
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][1].vars[0] == std_hash(treasury_puzhash + apa_msg)
+
+ # close a failed proposal
+ full_proposal = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ acs_ph,
+ 20, # failing number of yes votes
+ current_total_votes,
+ )
+ solution = Program.to(
+ [
+ Program.to("validator_hash").get_tree_hash(),
+ 0,
+ # Program.to("receiver_hash").get_tree_hash(), # not needed anymore?
+ proposal_timelock,
+ proposal_pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ 1,
+ ]
+ )
+ conds = conditions_dict_for_solution(full_proposal, solution, INFINITE_COST)
+ apa_msg = int_to_bytes(0)
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][1].vars[0] == std_hash(treasury_puzhash + apa_msg)
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == timer_apa
+
+ finished_puz = DAO_FINISHED_STATE.curry(singleton_struct, DAO_FINISHED_STATE_HASH)
+ assert conds[ConditionOpcode.CREATE_COIN][0].vars[0] == finished_puz.get_tree_hash()
+
+ # self destruct a proposal
+ attendance_required = 200
+ solution = Program.to(
+ [
+ Program.to("validator_hash").get_tree_hash(),
+ 0,
+ # Program.to("receiver_hash").get_tree_hash(), # not needed anymore?
+ proposal_timelock,
+ proposal_pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_time,
+ oracle_spend_delay,
+ 1,
+ 1,
+ ]
+ )
+ conds = conditions_dict_for_solution(full_proposal, solution, INFINITE_COST)
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == std_hash(treasury_puzhash + apa_msg)
+ assert conds[ConditionOpcode.CREATE_COIN][0].vars[0] == finished_puz.get_tree_hash()
+
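A small worked check of the passing and failing closes exercised above, assuming the pass percentage is expressed in units of 1/10000 (5100 corresponds to 51%) and the comparison has the form yes * 10000 >= total * pass_percentage; this only reproduces the two cases, the exact boundary behaviour lives in the puzzle.

    # Worked check of the two closing cases above (assumed 1/10000 percentage units).
    def passes(yes_votes: int, total_votes: int, attendance: int, pass_percentage: int) -> bool:
        return total_votes >= attendance and yes_votes * 10000 >= total_votes * pass_percentage

    assert passes(200, 350, 200, 5100) is True   # the passing close
    assert passes(20, 350, 200, 5100) is False   # the failing close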
+
+def test_proposal_timer() -> None:
+ """
+ The timer puzzle is created at the same time as a proposal, and enforces a relative time condition on proposals.
+ The closing time is passed in via the timer solution and confirmed via announcement from the proposal.
+ It creates/asserts announcements to pair it with the finishing spend of a proposal.
+ The timer puzzle only has one spend path so there is only one test case for this puzzle.
+ """
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+ treasury_id: Program = Program.to("treasury").get_tree_hash()
+ singleton_id: Program = Program.to("singleton_id").get_tree_hash()
+ singleton_struct: Program = Program.to(
+ (SINGLETON_MOD.get_tree_hash(), (singleton_id, SINGLETON_LAUNCHER.get_tree_hash()))
+ )
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ proposal_curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ dao_lockup_self.get_tree_hash(),
+ CAT_TAIL_HASH,
+ treasury_id,
+ )
+
+ proposal_timer_full: Program = DAO_PROPOSAL_TIMER_MOD.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_struct,
+ )
+
+ timelock = int_to_bytes(101)
+ parent_parent_id = Program.to("parent_parent").get_tree_hash()
+ parent_amount = 2000
+ solution: Program = Program.to(
+ [
+ 140, # yes votes
+ 180, # total votes
+ Program.to(1).get_tree_hash(), # proposal innerpuz
+ timelock,
+ parent_parent_id,
+ parent_amount,
+ ]
+ )
+ # run the timer puzzle.
+ conds = conditions_dict_for_solution(proposal_timer_full, solution, INFINITE_COST)
+ assert len(conds) == 4
+
+ # Validate the output conditions
+ # Check the timelock is present
+ assert conds[ConditionOpcode.ASSERT_HEIGHT_RELATIVE][0].vars[0] == timelock
+ # Check the proposal id is announced by the timer puz
+ assert conds[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT][0].vars[0] == singleton_id
+ # Check the proposal puz announces the timelock
+ expected_proposal_puzhash: bytes32 = create_singleton_puzzle_hash(
+ proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(), singleton_id, Program.to(1).get_tree_hash(), 140, 180
+ ).get_tree_hash(),
+ singleton_id,
+ )
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == std_hash(
+ expected_proposal_puzhash + timelock
+ )
+ # Check the parent is a proposal
+ expected_parent_puzhash: bytes32 = create_singleton_puzzle_hash(
+ proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ singleton_id,
+ Program.to(1).get_tree_hash(),
+ 0,
+ 0,
+ ).get_tree_hash(),
+ singleton_id,
+ )
+ parent_id = std_hash(parent_parent_id + expected_parent_puzhash + int_to_bytes(parent_amount))
+ assert conds[ConditionOpcode.ASSERT_MY_PARENT_ID][0].vars[0] == parent_id
+
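The ASSERT_MY_PARENT_ID value above is built with std_hash(parent + puzzle_hash + int_to_bytes(amount)); assuming that is exactly how a coin id is formed (which the test relies on), the same value can be obtained from Coin(...).name().

    # Sketch: the parent id asserted by the timer is just the parent coin's id.
    from clvm.casts import int_to_bytes
    from chia.types.blockchain_format.coin import Coin
    from chia.types.blockchain_format.program import Program
    from chia.util.hash import std_hash

    parent_parent = Program.to("parent_parent").get_tree_hash()
    puzhash = Program.to("some_puzzle").get_tree_hash()
    amount = 2000
    assert Coin(parent_parent, puzhash, amount).name() == std_hash(
        parent_parent + puzhash + int_to_bytes(amount)
    )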
+
+def test_validator() -> None:
+ """
+ The proposal validator is run by the treasury when a passing proposal is closed.
+ Its main purpose is to check that the proposal's vote amounts adhere to the DAO rules contained
+ in the treasury (which are passed in as Truth values).
+ It creates a puzzle announcement of the proposal ID, which the proposal itself asserts.
+ It also spends the value held in the proposal to the excess payout puzhash.
+
+ The test cases covered are:
+ - Executing a spend proposal, in which the validator executes the spend of a `spend_p2_singleton` coin. This is just a proposal that spends some of the treasury funds.
+ - Executing an update proposal that changes the DAO rules.
+ """
+ # Setup the treasury
+ treasury_id: Program = Program.to("treasury_id").get_tree_hash()
+ treasury_struct: Program = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+
+ # Setup the proposal
+ proposal_id: Program = Program.to("proposal_id").get_tree_hash()
+ proposal_struct: Program = Program.to((SINGLETON_MOD.get_tree_hash(), (proposal_id, SINGLETON_LAUNCHER_HASH)))
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+ acs: Program = Program.to(1)
+ acs_ph: bytes32 = acs.get_tree_hash()
+
+ p2_singleton = P2_SINGLETON_MOD.curry(treasury_struct, P2_SINGLETON_AGGREGATOR_MOD)
+ p2_singleton_puzhash = p2_singleton.get_tree_hash()
+ parent_id = Program.to("parent").get_tree_hash()
+ locked_amount = 100000
+ spend_amount = 1100
+ conditions = [[51, 0xDABBAD00, 1000], [51, 0xCAFEF00D, 100]]
+
+ # Setup the validator
+ minimum_amt = 1
+ excess_puzhash = bytes32(b"1" * 32)
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ proposal_curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ dao_lockup_self.get_tree_hash(),
+ CAT_TAIL_HASH,
+ treasury_id,
+ )
+ proposal_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ treasury_struct,
+ proposal_curry_one.get_tree_hash(),
+ minimum_amt,
+ excess_puzhash,
+ )
+
+ # Can now create the treasury inner puz
+ treasury_inner = DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ 10, # proposal len
+ 5, # soft close
+ 1000, # attendance
+ 5100, # pass margin
+ 20, # self_destruct len
+ 3, # oracle delay
+ )
+
+ # Setup the spend_p2_singleton (proposal inner puz)
+ spend_p2_singleton = SPEND_P2_SINGLETON_MOD.curry(
+ treasury_struct, CAT_MOD_HASH, conditions, [], p2_singleton_puzhash # tailhash conds
+ )
+ spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash()
+
+ parent_amt_list = [[parent_id, locked_amount]]
+ cat_parent_amt_list: List[Optional[Any]] = []
+ spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner.get_tree_hash()])
+
+ output_conds = spend_p2_singleton.run(spend_p2_singleton_solution)
+
+ proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ proposal_id,
+ spend_p2_singleton_puzhash,
+ 950,
+ 1200,
+ )
+ full_proposal = SINGLETON_MOD.curry(proposal_struct, proposal)
+ proposal_amt = 10
+ proposal_coin_id = Coin(parent_id, full_proposal.get_tree_hash(), proposal_amt).name()
+ solution = Program.to(
+ [
+ 1000,
+ 5100,
+ [proposal_coin_id, spend_p2_singleton_puzhash, 0],
+ [proposal_id, 1200, 950, parent_id, proposal_amt],
+ output_conds,
+ ]
+ )
+
+ conds: Program = proposal_validator.run(solution)
+ assert len(conds.as_python()) == 7 + len(conditions)
+
+ # test update
+ proposal = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ proposal_id,
+ acs_ph,
+ 950,
+ 1200,
+ )
+ full_proposal = SINGLETON_MOD.curry(proposal_struct, proposal)
+ proposal_coin_id = Coin(parent_id, full_proposal.get_tree_hash(), proposal_amt).name()
+ solution = Program.to(
+ [
+ 1000,
+ 5100,
+ [proposal_coin_id, acs_ph, 0],
+ [proposal_id, 1200, 950, parent_id, proposal_amt],
+ [[51, 0xCAFEF00D, spend_amount]],
+ ]
+ )
+ conds = proposal_validator.run(solution)
+ assert len(conds.as_python()) == 3
+
+ return
+
+
+def test_spend_p2_singleton() -> None:
+ # Curried values
+ singleton_id: Program = Program.to("singleton_id").get_tree_hash()
+ singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (singleton_id, SINGLETON_LAUNCHER_HASH)))
+ p2_singleton_puzhash = P2_SINGLETON_MOD.curry(singleton_struct, P2_SINGLETON_AGGREGATOR_MOD).get_tree_hash()
+ cat_tail_1 = Program.to("cat_tail_1").get_tree_hash()
+ cat_tail_2 = Program.to("cat_tail_2").get_tree_hash()
+ conditions = [[51, 0xCAFEF00D, 100], [51, 0xFEEDBEEF, 200]]
+ list_of_tailhash_conds = [
+ [cat_tail_1, [[51, 0x8BADF00D, 123], [51, 0xF00DF00D, 321]]],
+ [cat_tail_2, [[51, 0x8BADF00D, 123], [51, 0xF00DF00D, 321]]],
+ ]
+
+ # Solution Values
+ xch_parent_amt_list = [[b"x" * 32, 10], [b"y" * 32, 100]]
+ cat_parent_amt_list = [
+ [cat_tail_1, [[b"b" * 32, 100], [b"c" * 32, 400]]],
+ [cat_tail_2, [[b"e" * 32, 100], [b"f" * 32, 400]]],
+ ]
+ # cat_parent_amt_list = []
+ treasury_inner_puzhash = Program.to("treasury_inner").get_tree_hash()
+
+ # Puzzle
+ spend_p2_puz = SPEND_P2_SINGLETON_MOD.curry(
+ singleton_struct, CAT_MOD_HASH, conditions, list_of_tailhash_conds, p2_singleton_puzhash
+ )
+
+ # Solution
+ spend_p2_sol = Program.to([xch_parent_amt_list, cat_parent_amt_list, treasury_inner_puzhash])
+
+ conds = spend_p2_puz.run(spend_p2_sol)
+ assert conds
+
+ # spend only cats
+ conditions = []
+ list_of_tailhash_conds = [
+ [cat_tail_1, [[51, b"q" * 32, 123], [51, b"w" * 32, 321]]],
+ [cat_tail_2, [[51, b"e" * 32, 123], [51, b"r" * 32, 321]]],
+ ]
+ xch_parent_amt_list = []
+ cat_parent_amt_list = [
+ [cat_tail_1, [[b"b" * 32, 100], [b"c" * 32, 400]]],
+ [cat_tail_2, [[b"e" * 32, 100], [b"f" * 32, 400]]],
+ ]
+ treasury_inner_puzhash = Program.to("treasury_inner").get_tree_hash()
+
+ # Puzzle
+ spend_p2_puz = SPEND_P2_SINGLETON_MOD.curry(
+ singleton_struct, CAT_MOD_HASH, conditions, list_of_tailhash_conds, p2_singleton_puzhash
+ )
+
+ # Solution
+ spend_p2_sol = Program.to([xch_parent_amt_list, cat_parent_amt_list, treasury_inner_puzhash])
+ conds = spend_p2_puz.run(spend_p2_sol)
+ assert conds
+
+
+def test_merge_p2_singleton() -> None:
+ """
+ The treasury funds are held by p2_singleton_via_delegated puzzles. Because a DAO can have a
+ large number of these coins, they can be merged together without requiring a treasury spend.
+ There are two cases tested:
+ - the merge coins that do not create the single output coin, and
+ - the coin that does create the output.
+ """
+ # Setup a singleton struct
+ singleton_inner: Program = Program.to(1)
+ singleton_id: Program = Program.to("singleton_id").get_tree_hash()
+ singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (singleton_id, SINGLETON_LAUNCHER_HASH)))
+
+ # Setup p2_singleton_via_delegated puz
+ my_id = Program.to("my_id").get_tree_hash()
+ p2_singleton = P2_SINGLETON_MOD.curry(singleton_struct, P2_SINGLETON_AGGREGATOR_MOD)
+ my_puzhash = p2_singleton.get_tree_hash()
+
+ # Spend to delegated puz
+ delegated_puz = Program.to(1)
+ delegated_sol = Program.to([[51, 0xCAFEF00D, 300]])
+ solution = Program.to([0, singleton_inner.get_tree_hash(), delegated_puz, delegated_sol, my_id])
+ conds = conditions_dict_for_solution(p2_singleton, solution, INFINITE_COST)
+ apa = std_hash(
+ SINGLETON_MOD.curry(singleton_struct, singleton_inner).get_tree_hash()
+ + Program.to([my_id, delegated_puz.get_tree_hash()]).get_tree_hash()
+ )
+ assert len(conds) == 4
+ assert conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT][0].vars[0] == apa
+ assert conds[ConditionOpcode.CREATE_COIN][0].vars[1] == int_to_bytes(300)
+
+ # Merge Spend (not output creator)
+ output_parent_id = Program.to("output_parent").get_tree_hash()
+ output_coin_amount = 100
+ aggregator_sol = Program.to([my_id, my_puzhash, 300, 0, [output_parent_id, output_coin_amount]])
+ merge_p2_singleton_sol = Program.to([aggregator_sol, 0, 0, 0, 0])
+ conds = conditions_dict_for_solution(p2_singleton, merge_p2_singleton_sol, INFINITE_COST)
+ assert len(conds) == 4
+ assert conds[ConditionOpcode.ASSERT_MY_PUZZLEHASH][0].vars[0] == my_puzhash
+ assert conds[ConditionOpcode.CREATE_COIN_ANNOUNCEMENT][0].vars[0] == int_to_bytes(0)
+
+ # Merge Spend (output creator)
+ fake_parent_id = Program.to("fake_parent").get_tree_hash()
+ merged_coin_id = Coin(fake_parent_id, my_puzhash, 200).name()
+ merge_sol = Program.to([[my_id, my_puzhash, 100, [[fake_parent_id, my_puzhash, 200]], 0]])
+ conds = conditions_dict_for_solution(p2_singleton, merge_sol, INFINITE_COST)
+ assert len(conds) == 7
+ assert conds[ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT][0].vars[0] == std_hash(merged_coin_id)
+ assert conds[ConditionOpcode.CREATE_COIN][0].vars[1] == int_to_bytes(300)
+ return
+
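In the output-creator case above, the surviving coin's value is the spender's own amount plus the amounts of the coins it absorbs; a one-line check of the 300-mojo assertion.

    # Worked arithmetic for the merged output amount asserted above.
    my_amount = 100
    absorbed_amounts = [200]   # the single [fake_parent_id, my_puzhash, 200] entry
    assert my_amount + sum(absorbed_amounts) == 300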
+
+def test_treasury() -> None:
+ """
+ The treasury has two spend paths:
+ - Proposal Path: when a proposal is being closed, the treasury spend runs the validator and the actual proposed code (if the proposal passed)
+ - Oracle Path: The treasury can make announcements about itself that are used to close invalid proposals
+ """
+ # Setup the treasury
+ treasury_id: Program = Program.to("treasury_id").get_tree_hash()
+ treasury_struct: Program = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+
+ proposal_id: Program = Program.to("singleton_id").get_tree_hash()
+ proposal_struct: Program = Program.to((SINGLETON_MOD_HASH, (proposal_id, SINGLETON_LAUNCHER_HASH)))
+ p2_singleton = P2_SINGLETON_MOD.curry(treasury_struct, P2_SINGLETON_AGGREGATOR_MOD)
+ p2_singleton_puzhash = p2_singleton.get_tree_hash()
+ parent_id = Program.to("parent").get_tree_hash()
+ locked_amount = 100000
+ oracle_spend_delay = 10
+ self_destruct_time = 1000
+ proposal_length = 40
+ soft_close_length = 5
+ attendance = 1000
+ pass_margin = 5100
+ conditions = [[51, 0xDABBAD00, 1000], [51, 0xCAFEF00D, 100]]
+
+ # Setup the validator
+ minimum_amt = 1
+ excess_puzhash = bytes32(b"1" * 32)
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ proposal_curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ dao_lockup_self.get_tree_hash(),
+ CAT_TAIL_HASH,
+ treasury_id,
+ )
+ proposal_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ treasury_struct,
+ proposal_curry_one.get_tree_hash(),
+ minimum_amt,
+ excess_puzhash,
+ )
+
+ # Can now create the treasury inner puz
+ treasury_inner = DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ proposal_length,
+ soft_close_length,
+ attendance,
+ pass_margin,
+ self_destruct_time,
+ oracle_spend_delay,
+ )
+
+ # Setup the spend_p2_singleton (proposal inner puz)
+ spend_p2_singleton = SPEND_P2_SINGLETON_MOD.curry(
+ treasury_struct, CAT_MOD_HASH, conditions, [], p2_singleton_puzhash # tailhash conds
+ )
+ spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash()
+
+ parent_amt_list = [[parent_id, locked_amount]]
+ cat_parent_amt_list: List[Optional[Any]] = []
+ spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner.get_tree_hash()])
+
+ proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ proposal_id,
+ spend_p2_singleton_puzhash,
+ 950,
+ 1200,
+ )
+ full_proposal = SINGLETON_MOD.curry(proposal_struct, proposal)
+
+ # Oracle spend
+ solution: Program = Program.to([0, 0, 0, 0, 0, treasury_struct])
+ conds: Program = treasury_inner.run(solution)
+ assert len(conds.as_python()) == 3
+
+ # Proposal Spend
+ proposal_amt = 10
+ proposal_coin_id = Coin(parent_id, full_proposal.get_tree_hash(), proposal_amt).name()
+ solution = Program.to(
+ [
+ [proposal_coin_id, spend_p2_singleton_puzhash, 0, "s"],
+ [proposal_id, 1200, 950, parent_id, proposal_amt],
+ spend_p2_singleton,
+ spend_p2_singleton_solution,
+ ]
+ )
+ conds = treasury_inner.run(solution)
+ assert len(conds.as_python()) == 9 + len(conditions)
+
+
+def test_lockup() -> None:
+ """
+ The lockup puzzle tracks the voting records of DAO CATs. When a proposal is voted on, the
+ proposal ID is added to a list against which future votes are checked.
+ This test checks the addition of new votes to the lockup, and that you can't re-vote on a proposal twice.
+ """
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+
+ INNERPUZ = Program.to(1)
+ previous_votes = [0xFADEDDAB]
+
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ full_lockup_puz: Program = dao_lockup_self.curry(
+ dao_lockup_self.get_tree_hash(),
+ previous_votes,
+ INNERPUZ,
+ )
+ my_id = Program.to("my_id").get_tree_hash()
+ lockup_coin_amount = 20
+
+ # Test adding vote
+ new_proposal = 0xBADDADAB
+ new_vote_list = [new_proposal, 0xFADEDDAB]
+ child_puzhash = dao_lockup_self.curry(
+ dao_lockup_self.get_tree_hash(),
+ new_vote_list,
+ INNERPUZ,
+ ).get_tree_hash()
+ message = Program.to([new_proposal, lockup_coin_amount, 1, my_id]).get_tree_hash()
+ generated_conditions = [[51, child_puzhash, lockup_coin_amount], [62, message]]
+ solution: Program = Program.to(
+ [
+ my_id,
+ generated_conditions,
+ 20,
+ new_proposal,
+ INNERPUZ.get_tree_hash(), # fake proposal curry vals
+ 1,
+ 20,
+ child_puzhash,
+ 0,
+ ]
+ )
+ conds: Program = full_lockup_puz.run(solution)
+ assert len(conds.as_python()) == 6
+
+ # Test Re-voting on same proposal fails
+ new_proposal = 0xBADDADAB
+ new_vote_list = [new_proposal, 0xBADDADAB]
+ child_puzhash = dao_lockup_self.curry(
+ dao_lockup_self.get_tree_hash(),
+ new_vote_list,
+ INNERPUZ,
+ ).get_tree_hash()
+ message = Program.to([new_proposal, lockup_coin_amount, 1, my_id]).get_tree_hash()
+ generated_conditions = [[51, child_puzhash, lockup_coin_amount], [62, message]]
+ revote_solution: Program = Program.to(
+ [
+ my_id,
+ generated_conditions,
+ 20,
+ new_proposal,
+ INNERPUZ.get_tree_hash(), # fake proposal curry vals
+ 1,
+ 20,
+ child_puzhash,
+ 0,
+ ]
+ )
+ with pytest.raises(ValueError) as e_info:
+ conds = full_lockup_puz.run(revote_solution)
+ assert e_info.value.args[0] == "clvm raise"
+
+ # Test vote removal
+ solution = Program.to(
+ [
+ 0,
+ generated_conditions,
+ 20,
+ [0xFADEDDAB],
+ INNERPUZ.get_tree_hash(),
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ )
+ conds = full_lockup_puz.run(solution)
+ assert len(conds.as_python()) == 3
+
+ new_innerpuz = Program.to("new_inner")
+ new_innerpuzhash = new_innerpuz.get_tree_hash()
+ child_lockup = dao_lockup_self.curry(
+ dao_lockup_self.get_tree_hash(),
+ previous_votes,
+ new_innerpuz,
+ ).get_tree_hash()
+ message = Program.to([0, 0, 0, my_id]).get_tree_hash()
+ spend_conds = [[51, child_lockup, lockup_coin_amount], [62, message]]
+ transfer_sol = Program.to(
+ [
+ my_id,
+ spend_conds,
+ lockup_coin_amount,
+ 0,
+ INNERPUZ.get_tree_hash(), # fake proposal curry vals
+ 0,
+ 0,
+ INNERPUZ.get_tree_hash(),
+ new_innerpuzhash,
+ ]
+ )
+ conds = full_lockup_puz.run(transfer_sol)
+ assert conds.at("rrrrfrf").as_atom() == child_lockup
+
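The final assertion in test_lockup navigates the returned condition list with an at() path string ('f' = first, 'r' = rest), the same helper the Chia Program class exposes; a tiny self-contained illustration with a made-up condition list.

    # Illustration of Program.at() path strings ('f' = first, 'r' = rest).
    from chia.types.blockchain_format.program import Program

    conds = Program.to([[51, b"a" * 32, 20], [62, b"m" * 32]])
    assert conds.at("ff").as_int() == 51           # opcode of the first condition
    assert conds.at("frf").as_atom() == b"a" * 32  # its puzzle hash
    assert conds.at("rff").as_int() == 62          # opcode of the second condition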
+
+def test_proposal_lifecycle() -> None:
+ """
+ This test covers the whole lifecycle of a proposal and treasury. Its main function is to check
+ that the announcement pairs between the treasury and the proposal are accurate. It covers the
+ spend proposal and update proposal types.
+ """
+ proposal_pass_percentage: uint64 = uint64(5100)
+ attendance_required: uint64 = uint64(1000)
+ proposal_timelock: uint64 = uint64(40)
+ soft_close_length: uint64 = uint64(5)
+ self_destruct_time: uint64 = uint64(1000)
+ oracle_spend_delay: uint64 = uint64(10)
+ min_amt: uint64 = uint64(1)
+ CAT_TAIL_HASH: Program = Program.to("tail").get_tree_hash()
+
+ dao_rules = DAORules(
+ proposal_timelock=proposal_timelock,
+ soft_close_length=soft_close_length,
+ attendance_required=attendance_required,
+ pass_percentage=proposal_pass_percentage,
+ self_destruct_length=self_destruct_time,
+ oracle_spend_delay=oracle_spend_delay,
+ proposal_minimum_amount=min_amt,
+ )
+
+ # Setup the treasury
+ treasury_id: Program = Program.to("treasury_id").get_tree_hash()
+ treasury_singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (treasury_id, SINGLETON_LAUNCHER_HASH)))
+ treasury_amount = 1
+
+ # setup the p2_singleton
+ p2_singleton = P2_SINGLETON_MOD.curry(treasury_singleton_struct, P2_SINGLETON_AGGREGATOR_MOD)
+ p2_singleton_puzhash = p2_singleton.get_tree_hash()
+ parent_id = Program.to("parent").get_tree_hash()
+ locked_amount = 100000
+ conditions = [[51, 0xDABBAD00, 1000], [51, 0xCAFEF00D, 100]]
+
+ excess_puzhash = get_p2_singleton_puzhash(treasury_id)
+ dao_lockup_self = DAO_LOCKUP_MOD.curry(
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ DAO_FINISHED_STATE_HASH,
+ CAT_MOD_HASH,
+ CAT_TAIL_HASH,
+ )
+
+ proposal_curry_one = DAO_PROPOSAL_MOD.curry(
+ DAO_PROPOSAL_TIMER_MOD_HASH,
+ SINGLETON_MOD_HASH,
+ SINGLETON_LAUNCHER_HASH,
+ CAT_MOD_HASH,
+ DAO_FINISHED_STATE_HASH,
+ DAO_TREASURY_MOD_HASH,
+ dao_lockup_self.get_tree_hash(),
+ CAT_TAIL_HASH,
+ treasury_id,
+ )
+ proposal_validator = DAO_PROPOSAL_VALIDATOR_MOD.curry(
+ treasury_singleton_struct,
+ proposal_curry_one.get_tree_hash(),
+ min_amt,
+ excess_puzhash,
+ )
+
+ treasury_inner_puz: Program = DAO_TREASURY_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ proposal_validator,
+ proposal_timelock,
+ soft_close_length,
+ attendance_required,
+ proposal_pass_percentage,
+ self_destruct_time,
+ oracle_spend_delay,
+ )
+ treasury_inner_puzhash = treasury_inner_puz.get_tree_hash()
+
+ calculated_treasury_puzhash = get_treasury_puzzle(dao_rules, treasury_id, CAT_TAIL_HASH).get_tree_hash()
+ assert treasury_inner_puzhash == calculated_treasury_puzhash
+
+ full_treasury_puz = SINGLETON_MOD.curry(treasury_singleton_struct, treasury_inner_puz)
+ full_treasury_puzhash = full_treasury_puz.get_tree_hash()
+
+ # Setup the spend_p2_singleton (proposal inner puz)
+ spend_p2_singleton = SPEND_P2_SINGLETON_MOD.curry(
+ treasury_singleton_struct, CAT_MOD_HASH, conditions, [], p2_singleton_puzhash # tailhash conds
+ )
+ spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash()
+
+ parent_amt_list = [[parent_id, locked_amount]]
+ cat_parent_amt_list: List[Optional[Any]] = []
+ spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner_puzhash])
+
+ # Setup Proposal
+ proposal_id: Program = Program.to("proposal_id").get_tree_hash()
+ proposal_singleton_struct: Program = Program.to((SINGLETON_MOD_HASH, (proposal_id, SINGLETON_LAUNCHER_HASH)))
+
+ current_votes = 1200
+ yes_votes = 950
+ proposal: Program = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ proposal_id,
+ spend_p2_singleton_puzhash,
+ yes_votes,
+ current_votes,
+ )
+ full_proposal: Program = SINGLETON_MOD.curry(proposal_singleton_struct, proposal)
+ full_proposal_puzhash: bytes32 = full_proposal.get_tree_hash()
+ proposal_amt = 11
+ proposal_coin_id = Coin(parent_id, full_proposal_puzhash, proposal_amt).name()
+
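+    # Treasury solution: the proposal coin details, the validator's vote and parent data, then the proposed spend puzzle and its solution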
+ treasury_solution: Program = Program.to(
+ [
+ [proposal_coin_id, spend_p2_singleton_puzhash, 0],
+ [proposal_id, current_votes, yes_votes, parent_id, proposal_amt],
+ spend_p2_singleton,
+ spend_p2_singleton_solution,
+ ]
+ )
+
+ proposal_solution = Program.to(
+ [
+ proposal_validator.get_tree_hash(),
+ 0,
+ proposal_timelock,
+ proposal_pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ proposal_amt,
+ ]
+ )
+
+ # lineage_proof my_amount inner_solution
+ lineage_proof = [treasury_id, treasury_inner_puzhash, treasury_amount]
+ full_treasury_solution = Program.to([lineage_proof, treasury_amount, treasury_solution])
+ full_proposal_solution = Program.to([lineage_proof, proposal_amt, proposal_solution])
+
+ # Run the puzzles
+ treasury_conds = conditions_dict_for_solution(full_treasury_puz, full_treasury_solution, INFINITE_COST)
+ proposal_conds = conditions_dict_for_solution(full_proposal, full_proposal_solution, INFINITE_COST)
+
+ # Announcements
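+    # The treasury's asserted coin announcement must equal the hash of the proposal coin id plus the proposal's created announcement message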
+ treasury_aca = treasury_conds[ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT][0].vars[0]
+ proposal_cca = proposal_conds[ConditionOpcode.CREATE_COIN_ANNOUNCEMENT][0].vars[0]
+ assert std_hash(proposal_coin_id + proposal_cca) == treasury_aca
+
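+    # The proposal in turn asserts a puzzle announcement created by the treasury puzzle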
+ treasury_cpas = [
+ std_hash(full_treasury_puzhash + cond.vars[0])
+ for cond in treasury_conds[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT]
+ ]
+ proposal_apas = [cond.vars[0] for cond in proposal_conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT]]
+ assert treasury_cpas[1] == proposal_apas[1]
+
+ # Test Proposal to update treasury
+ # Set up new treasury params
+ new_proposal_pass_percentage: uint64 = uint64(2500)
+ new_attendance_required: uint64 = uint64(500)
+ new_proposal_timelock: uint64 = uint64(900)
+ new_soft_close_length: uint64 = uint64(10)
+ new_self_destruct_time: uint64 = uint64(1000)
+ new_oracle_spend_delay: uint64 = uint64(20)
+ new_minimum_amount: uint64 = uint64(10)
+ proposal_excess_puzhash: bytes32 = get_p2_singleton_puzhash(treasury_id)
+
+ new_dao_rules = DAORules(
+ proposal_timelock=new_proposal_timelock,
+ soft_close_length=new_soft_close_length,
+ attendance_required=new_attendance_required,
+ pass_percentage=new_proposal_pass_percentage,
+ self_destruct_length=new_self_destruct_time,
+ oracle_spend_delay=new_oracle_spend_delay,
+ proposal_minimum_amount=new_minimum_amount,
+ )
+
+ update_proposal = DAO_UPDATE_MOD.curry(
+ DAO_TREASURY_MOD_HASH,
+ DAO_PROPOSAL_VALIDATOR_MOD_HASH,
+ treasury_singleton_struct,
+ proposal_curry_one.get_tree_hash(),
+ new_minimum_amount,
+ proposal_excess_puzhash,
+ new_proposal_timelock,
+ new_soft_close_length,
+ new_attendance_required,
+ new_proposal_pass_percentage,
+ new_self_destruct_time,
+ new_oracle_spend_delay,
+ )
+ update_proposal_puzhash = update_proposal.get_tree_hash()
+ update_proposal_sol = Program.to([])
+
+ proposal = proposal_curry_one.curry(
+ proposal_curry_one.get_tree_hash(),
+ proposal_id,
+ update_proposal_puzhash,
+ yes_votes,
+ current_votes,
+ )
+ full_proposal = SINGLETON_MOD.curry(proposal_singleton_struct, proposal)
+ full_proposal_puzhash = full_proposal.get_tree_hash()
+ proposal_coin_id = Coin(parent_id, full_proposal_puzhash, proposal_amt).name()
+
+ treasury_solution = Program.to(
+ [
+ [proposal_coin_id, update_proposal_puzhash, 0, "u"],
+ [proposal_id, current_votes, yes_votes, parent_id, proposal_amt],
+ update_proposal,
+ update_proposal_sol,
+ ]
+ )
+
+ proposal_solution = Program.to(
+ [
+ proposal_validator.get_tree_hash(),
+ 0,
+ proposal_timelock,
+ proposal_pass_percentage,
+ attendance_required,
+ 0,
+ soft_close_length,
+ self_destruct_time,
+ oracle_spend_delay,
+ 0,
+ proposal_amt,
+ ]
+ )
+
+ lineage_proof = [treasury_id, treasury_inner_puzhash, treasury_amount]
+ full_treasury_solution = Program.to([lineage_proof, treasury_amount, treasury_solution])
+ full_proposal_solution = Program.to([lineage_proof, proposal_amt, proposal_solution])
+
+ treasury_conds = conditions_dict_for_solution(full_treasury_puz, full_treasury_solution, INFINITE_COST)
+ proposal_conds = conditions_dict_for_solution(full_proposal, full_proposal_solution, INFINITE_COST)
+
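+    # Repeat the announcement pairing checks for the update-proposal spend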
+ treasury_aca = treasury_conds[ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT][0].vars[0]
+ proposal_cca = proposal_conds[ConditionOpcode.CREATE_COIN_ANNOUNCEMENT][0].vars[0]
+ assert std_hash(proposal_coin_id + proposal_cca) == treasury_aca
+
+ treasury_cpas = [
+ std_hash(full_treasury_puzhash + cond.vars[0])
+ for cond in treasury_conds[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT]
+ ]
+ proposal_apas = [cond.vars[0] for cond in proposal_conds[ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT]]
+ assert treasury_cpas[1] == proposal_apas[1]
+
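+    # Running the update proposal returns the recreated treasury inner puzzle hash; it must match the puzzle built from new_dao_rules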
+ new_treasury_inner = update_proposal.run(update_proposal_sol).at("frf").as_atom()
+ expected_treasury_inner = get_treasury_puzzle(new_dao_rules, treasury_id, CAT_TAIL_HASH)
+ assert new_treasury_inner == expected_treasury_inner.get_tree_hash()
+
+ expected_treasury_hash = curry_singleton(treasury_id, expected_treasury_inner).get_tree_hash()
+ assert treasury_conds[ConditionOpcode.CREATE_COIN][1].vars[0] == expected_treasury_hash
+
+
+async def do_spend(
+ sim: SpendSim,
+ sim_client: SimClient,
+ coins: List[Coin],
+ puzzles: List[Program],
+ solutions: List[Program],
+) -> Tuple[MempoolInclusionStatus, Optional[Err]]:
+ spends = []
+ for coin, puzzle, solution in zip(coins, puzzles, solutions):
+ spends.append(CoinSpend(coin, puzzle, solution))
+ spend_bundle = SpendBundle(spends, AugSchemeMPL.aggregate([]))
+ result = await sim_client.push_tx(spend_bundle)
+ await sim.farm_block()
+ return result
+
+
+@pytest.mark.asyncio()
+async def test_singleton_aggregator() -> None:
+ async with sim_and_client() as (sim, sim_client):
+ aggregator = P2_SINGLETON_AGGREGATOR_MOD
+ aggregator_hash = aggregator.get_tree_hash()
+ await sim.farm_block(aggregator_hash)
+ await sim.farm_block(aggregator_hash)
+ for i in range(5):
+ await sim.farm_block()
+
+ coin_records = await sim_client.get_coin_records_by_puzzle_hash(aggregator_hash)
+ coins = [c.coin for c in coin_records]
+
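+        # Spend the first coin as the output coin and merge the remaining coins into it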
+ output_coin = coins[0]
+ output_sol = Program.to(
+ [
+ output_coin.name(),
+ output_coin.puzzle_hash,
+ output_coin.amount,
+ [[c.parent_coin_info, c.puzzle_hash, c.amount] for c in coins[1:]],
+ ]
+ )
+ merge_sols = [
+ Program.to([c.name(), c.puzzle_hash, c.amount, [], [output_coin.parent_coin_info, output_coin.amount]])
+ for c in coins[1:]
+ ]
+
+ res = await do_spend(sim, sim_client, coins, [aggregator] * 4, [output_sol, *merge_sols])
+ assert res[0] == MempoolInclusionStatus.SUCCESS
+
+ await sim.rewind(uint32(sim.block_height - 1))
+
+ # Spend a merge coin with empty output details
+ output_sol = Program.to(
+ [
+ output_coin.name(),
+ output_coin.puzzle_hash,
+ output_coin.amount,
+ [],
+ [],
+ ]
+ )
+ res = await do_spend(sim, sim_client, [output_coin], [aggregator], [output_sol])
+ assert res[0] == MempoolInclusionStatus.FAILED
+
+ # Try to steal treasury coins with a phoney output
+ acs = Program.to(1)
+ acs_ph = acs.get_tree_hash()
+ await sim.farm_block(acs_ph)
+ bad_coin = (await sim_client.get_coin_records_by_puzzle_hash(acs_ph))[0].coin
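+        # The phoney coin claims the combined amount while asserting the merge coins' announcements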
+ bad_sol = Program.to(
+ [
+ [ConditionOpcode.CREATE_COIN, acs_ph, sum(c.amount for c in coins)],
+ *[[ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, std_hash(c.name())] for c in coins],
+ [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, 0],
+ ]
+ )
+
+ merge_sols = [
+ Program.to([c.name(), c.puzzle_hash, c.amount, [], [bad_coin.parent_coin_info, bad_coin.amount]])
+ for c in coins
+ ]
+
+ res = await do_spend(sim, sim_client, [bad_coin, *coins], [acs] + [aggregator] * 4, [bad_sol, *merge_sols])
+ assert res[0] == MempoolInclusionStatus.FAILED
diff --git a/tests/wallet/dao_wallet/test_dao_wallets.py b/tests/wallet/dao_wallet/test_dao_wallets.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/dao_wallet/test_dao_wallets.py
@@ -0,0 +1,3649 @@
+from __future__ import annotations
+
+import asyncio
+import time
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+import pytest
+
+from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
+from chia.rpc.wallet_rpc_api import WalletRpcApi
+from chia.rpc.wallet_rpc_client import WalletRpcClient
+from chia.simulator.setup_nodes import SimulatorsAndWallets, SimulatorsAndWalletsServices
+from chia.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
+from chia.simulator.time_out_assert import adjusted_timeout, time_out_assert, time_out_assert_not_none
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.peer_info import PeerInfo
+from chia.types.spend_bundle import SpendBundle
+from chia.util.bech32m import encode_puzzle_hash
+from chia.util.ints import uint32, uint64, uint128
+from chia.wallet.cat_wallet.cat_wallet import CATWallet
+from chia.wallet.cat_wallet.dao_cat_wallet import DAOCATWallet
+from chia.wallet.dao_wallet.dao_info import DAORules
+from chia.wallet.dao_wallet.dao_utils import (
+ generate_mint_proposal_innerpuz,
+ generate_simple_proposal_innerpuz,
+ generate_update_proposal_innerpuz,
+)
+from chia.wallet.dao_wallet.dao_wallet import DAOWallet
+from chia.wallet.transaction_record import TransactionRecord
+from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG
+from tests.conftest import ConsensusMode
+from tests.util.rpc import validate_get_routes
+
+
+async def get_proposal_state(wallet: DAOWallet, index: int) -> Tuple[Optional[bool], Optional[bool]]:
+ return wallet.dao_info.proposals_list[index].passed, wallet.dao_info.proposals_list[index].closed
+
+
+async def rpc_state(
+ timeout: float,
+ async_function: Callable[[Any], Any],
+ params: List[Union[int, Dict[str, Any]]],
+ condition_func: Callable[[Dict[str, Any]], Any],
+ result: Optional[Any] = None,
+) -> Union[bool, Dict[str, Any]]: # pragma: no cover
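+    """Poll async_function(*params) until condition_func(resp) equals result (when given) or is truthy, or raise asyncio.TimeoutError."""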
+ __tracebackhide__ = True
+
+ timeout = adjusted_timeout(timeout=timeout)
+
+ start = time.monotonic()
+
+ while True:
+ resp = await async_function(*params)
+ assert isinstance(resp, dict)
+ try:
+ if result:
+ if condition_func(resp) == result:
+ return True
+ else:
+ if condition_func(resp):
+ return resp
+ except IndexError:
+ continue
+
+ now = time.monotonic()
+ elapsed = now - start
+ if elapsed >= timeout:
+ raise asyncio.TimeoutError(
+ f"timed out while waiting for {async_function.__name__}(): {elapsed} >= {timeout}",
+ )
+
+ await asyncio.sleep(0.3)
+
+
+puzzle_hash_0 = bytes32(32 * b"0")
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_creation(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 1
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ cat_amt = 2000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+        attendance_required=uint64(1000),  # 25% of the 4000 CATs issued
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+
+ fee = uint64(10)
+ fee_for_cat = uint64(20)
+
+ # Try to create a DAO with more CATs than xch balance
+ with pytest.raises(ValueError) as e_info:
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(funds + 1),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ fee=fee,
+ fee_for_cat=fee_for_cat,
+ )
+ assert e_info.value.args[0] == f"Your balance of {funds} mojos is not enough to create {funds + 1} CATs"
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(cat_amt * 2),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ fee=fee,
+ fee_for_cat=fee_for_cat,
+ )
+ assert dao_wallet_0 is not None
+
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=tx_queue)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Check the spend was successful
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # check the dao wallet balances
+ await time_out_assert(20, dao_wallet_0.get_confirmed_balance, uint128(1))
+ await time_out_assert(20, dao_wallet_0.get_unconfirmed_balance, uint128(1))
+ await time_out_assert(20, dao_wallet_0.get_pending_change_balance, uint64(0))
+
+ # check select coins
+ no_coins = await dao_wallet_0.select_coins(uint64(2), DEFAULT_TX_CONFIG)
+ assert no_coins == set()
+ selected_coins = await dao_wallet_0.select_coins(uint64(1), DEFAULT_TX_CONFIG)
+ assert len(selected_coins) == 1
+
+ # get the cat wallets
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+ # Some dao_cat_wallet checks for coverage
+ dao_cat_wallet_0.get_name()
+ assert (await dao_cat_wallet_0.select_coins(uint64(1), DEFAULT_TX_CONFIG)) == set()
+ dao_cat_puzhash = await dao_cat_wallet_0.get_new_puzzlehash()
+ assert dao_cat_puzhash
+ dao_cat_inner = await dao_cat_wallet_0.get_new_inner_puzzle(DEFAULT_TX_CONFIG)
+ assert dao_cat_inner
+ dao_cat_inner_hash = await dao_cat_wallet_0.get_new_inner_hash(DEFAULT_TX_CONFIG)
+ assert dao_cat_inner_hash
+
+ cat_wallet_0_bal = await cat_wallet_0.get_confirmed_balance()
+ assert cat_wallet_0_bal == cat_amt * 2
+
+ # Create the other user's wallet from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+ assert dao_wallet_0.dao_info.treasury_id == dao_wallet_1.dao_info.treasury_id
+
+ # Get the cat wallets for wallet_1
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ assert cat_wallet_1
+
+ # Send some cats to the dao_cat lockup
+ dao_cat_amt = uint64(100)
+ txs = await dao_wallet_0.enter_dao_cat_voting_mode(dao_cat_amt, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet.wallet_state_manager.add_pending_transaction(tx)
+ sb = txs[0].spend_bundle
+ assert isinstance(sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+
+ for i in range(1, num_blocks): # pragma: no cover
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Test that we can get spendable coins from both cat and dao_cat wallet
+ fake_proposal_id = Program.to("proposal_id").get_tree_hash()
+ spendable_coins = await dao_cat_wallet_0.wallet_state_manager.get_spendable_coins_for_wallet(
+ dao_cat_wallet_0.id(), None
+ )
+
+ assert len(spendable_coins) > 0
+ coins = await dao_cat_wallet_0.advanced_select_coins(1, fake_proposal_id)
+ assert len(coins) > 0
+ # check that we have selected the coin from dao_cat_wallet
+ assert list(coins)[0].coin.amount == dao_cat_amt
+
+ # send some cats from wallet_0 to wallet_1 so we can test voting
+ cat_txs = await cat_wallet_0.generate_signed_transaction([cat_amt], [ph_1], DEFAULT_TX_CONFIG)
+ await wallet.wallet_state_manager.add_pending_transaction(cat_txs[0])
+ sb = cat_txs[0].spend_bundle
+ assert isinstance(sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
+ await full_node_api.process_transaction_records(records=cat_txs)
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await time_out_assert(10, cat_wallet_1.get_confirmed_balance, cat_amt)
+
+ # Smaller tests of dao_wallet funcs for coverage
+ await dao_wallet_0.adjust_filter_level(uint64(10))
+ assert dao_wallet_0.dao_info.filter_below_vote_amount == uint64(10)
+
+ await dao_wallet_0.set_name("Renamed Wallet")
+ assert dao_wallet_0.get_name() == "Renamed Wallet"
+
+ new_inner_puzhash = await dao_wallet_0.get_new_p2_inner_hash()
+ assert isinstance(new_inner_puzhash, bytes32)
+
+ # run DAOCATwallet.create for coverage
+ create_dao_cat_from_info = await DAOCATWallet.create(
+ wallet.wallet_state_manager, wallet, dao_cat_wallet_0.wallet_info
+ )
+ assert create_dao_cat_from_info
+ create_dao_wallet_from_info = await DAOWallet.create(wallet.wallet_state_manager, wallet, dao_wallet_0.wallet_info)
+ assert create_dao_wallet_from_info
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_funding(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 1
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_node_2, server_2 = wallets[2]
+ wallet = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ wallet_2 = wallet_node_1.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+ ph_2 = await wallet_2.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_2.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+ wallet_node_2.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_2.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ cat_amt = 300000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(5),
+ soft_close_length=uint64(5),
+        attendance_required=uint64(1000),
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(cat_amt),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # Get the full node sim to process the wallet creation spend
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # get the cat wallets
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # Create funding spends for xch and cat
+ xch_funds = uint64(500000)
+ cat_funds = uint64(100000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(xch_funds, DEFAULT_TX_CONFIG)
+ await wallet.wallet_state_manager.add_pending_transaction(funding_tx)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Check that the funding spend is found
+ await time_out_assert(20, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+
+ cat_funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(
+ cat_funds, DEFAULT_TX_CONFIG, funding_wallet_id=cat_wallet_0.id()
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(cat_funding_tx)
+ cat_funding_sb = cat_funding_tx.spend_bundle
+ assert isinstance(cat_funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_funding_sb.name())
+ await full_node_api.process_transaction_records(records=[cat_funding_tx])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, cat_amt - cat_funds)
+
+ # Check that the funding spend is found
+ cat_id = bytes32.from_hexstr(cat_wallet_0.get_asset_id())
+ await time_out_assert(20, dao_wallet_0.get_balance_by_asset_type, cat_funds, cat_id)
+
+ # Create and close a proposal
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+ dao_cat_0_bal = await dao_cat_wallet_0.get_votable_balance()
+ txs_0 = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dao_cat_0_bal, DEFAULT_TX_CONFIG)
+ for tx in txs_0:
+ await wallet.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs_0[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs_0)
+ recipient_puzzle_hash = await wallet_2.get_new_puzzlehash()
+ proposal_amount_1 = uint64(10000)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id,
+ [recipient_puzzle_hash],
+ [proposal_amount_1],
+ [None],
+ )
+ proposal_tx = await dao_wallet_0.generate_new_proposal(xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal)
+ await wallet.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ for _ in range(5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ prop_0 = dao_wallet_0.dao_info.proposals_list[0]
+ close_tx_0 = await dao_wallet_0.create_proposal_close_spend(prop_0.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet.wallet_state_manager.add_pending_transaction(close_tx_0)
+ close_sb_0 = close_tx_0.spend_bundle
+ assert close_sb_0 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_0.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_0])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Create the other user's wallet from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+    assert dao_wallet_1.dao_info.treasury_id == treasury_id
+
+ # Get the cat wallets for wallet_1
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ assert cat_wallet_1
+ assert cat_wallet_1.cat_info.limitations_program_hash == cat_id
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(30, dao_wallet_0.get_balance_by_asset_type, xch_funds - 10000)
+ await time_out_assert(30, dao_wallet_0.get_balance_by_asset_type, cat_funds, cat_id)
+ await time_out_assert(30, dao_wallet_1.get_balance_by_asset_type, xch_funds - 10000)
+ await time_out_assert(30, dao_wallet_1.get_balance_by_asset_type, cat_funds, cat_id)
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_proposals(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ """
+ Test a set of proposals covering:
+    - the spend, update, and mint proposal types
+    - passing and failing proposals
+    - force-closing broken proposals
+
+ total cats issued: 300k
+ each wallet holds: 100k
+
+ The proposal types and amounts voted are:
+ P0 Spend => Pass
+ P1 Mint => Pass
+ P2 Update => Pass
+ P3 Spend => Fail
+ P4 Bad Spend => Force Close
+
+ """
+ num_blocks = 1
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_node_2, server_2 = wallets[2]
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+ ph_2 = await wallet_2.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_2.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+ wallet_node_2.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_2.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet_0.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ # set a standard fee amount to use in all txns
+ base_fee = uint64(100)
+
+ # set the cat issuance and DAO rules
+ cat_issuance = 300000
+ proposal_min_amt = uint64(101)
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+ attendance_required=uint64(190000),
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=proposal_min_amt,
+ )
+
+ # Create the DAO.
+ # This takes two steps: create the treasury singleton, wait for oracle_spend_delay and
+ # then complete the eve spend
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet_0,
+ uint64(cat_issuance),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, cat_issuance)
+ assert dao_cat_wallet_0
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # Create dao_wallet_1 from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+ assert dao_wallet_1.dao_info.treasury_id == treasury_id
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ dao_cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.dao_cat_wallet_id]
+ assert cat_wallet_1
+ assert dao_cat_wallet_1
+
+ # Create dao_wallet_2 from the treasury id
+ dao_wallet_2 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_2.wallet_state_manager,
+ wallet_2,
+ treasury_id,
+ )
+ assert dao_wallet_2 is not None
+ assert dao_wallet_2.dao_info.treasury_id == treasury_id
+ cat_wallet_2 = dao_wallet_2.wallet_state_manager.wallets[dao_wallet_2.dao_info.cat_wallet_id]
+ dao_cat_wallet_2 = dao_wallet_2.wallet_state_manager.wallets[dao_wallet_2.dao_info.dao_cat_wallet_id]
+ assert cat_wallet_2
+ assert dao_cat_wallet_2
+
+ # Send 100k cats to wallet_1 and wallet_2
+ cat_amt = uint64(100000)
+ cat_tx = await cat_wallet_0.generate_signed_transaction(
+ [cat_amt, cat_amt], [ph_1, ph_2], DEFAULT_TX_CONFIG, fee=base_fee
+ )
+ cat_sb = cat_tx[0].spend_bundle
+ await wallet_0.wallet_state_manager.add_pending_transaction(cat_tx[0])
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_sb.name())
+ await full_node_api.process_transaction_records(records=cat_tx)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Lockup voting cats for all wallets
+ dao_cat_0_bal = await dao_cat_wallet_0.get_votable_balance()
+ txs_0 = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dao_cat_0_bal, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs_0:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs_0[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs_0)
+
+ dao_cat_1_bal = await dao_cat_wallet_1.get_votable_balance()
+ txs_1 = await dao_cat_wallet_1.enter_dao_cat_voting_mode(dao_cat_1_bal, DEFAULT_TX_CONFIG)
+ for tx in txs_1:
+ await wallet_1.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_1 = txs_1[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_1.name())
+ await full_node_api.process_transaction_records(records=txs_1)
+
+ dao_cat_2_bal = await dao_cat_wallet_2.get_votable_balance()
+ txs_2 = await dao_cat_wallet_2.enter_dao_cat_voting_mode(dao_cat_2_bal, DEFAULT_TX_CONFIG)
+ for tx in txs_2:
+ await wallet_2.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_2 = txs_2[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_2.name())
+ await full_node_api.process_transaction_records(records=txs_2)
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ await time_out_assert(10, dao_cat_wallet_0.get_confirmed_balance, cat_amt)
+ await time_out_assert(10, dao_cat_wallet_1.get_confirmed_balance, cat_amt)
+ await time_out_assert(10, dao_cat_wallet_2.get_confirmed_balance, cat_amt)
+
+ # Create funding spend so the treasury holds some XCH
+ xch_funds = uint64(500000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(xch_funds, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(funding_tx)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Check that the funding spend is recognized by all wallets
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+ await time_out_assert(10, dao_wallet_1.get_balance_by_asset_type, xch_funds)
+ await time_out_assert(10, dao_wallet_2.get_balance_by_asset_type, xch_funds)
+
+ # Create Proposals
+
+ # Proposal 0: Spend xch to wallet_2.
+ recipient_puzzle_hash = await wallet_2.get_new_puzzlehash()
+ proposal_amount_1 = uint64(9998)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id,
+ [recipient_puzzle_hash],
+ [proposal_amount_1],
+ [None],
+ )
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, fee=base_fee
+ )
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+ prop_0 = dao_wallet_0.dao_info.proposals_list[0]
+
+ # Proposal 1: Mint new CATs
+ new_mint_amount = uint64(1000)
+ mint_proposal_inner = await generate_mint_proposal_innerpuz(
+ treasury_id,
+ cat_wallet_0.cat_info.limitations_program_hash,
+ new_mint_amount,
+ recipient_puzzle_hash,
+ )
+
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ mint_proposal_inner, DEFAULT_TX_CONFIG, vote_amount=dao_cat_0_bal, fee=base_fee
+ )
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert len(dao_wallet_0.dao_info.proposals_list) == 2
+ prop_1 = dao_wallet_0.dao_info.proposals_list[1]
+
+ # Proposal 2: Update DAO Rules.
+ new_dao_rules = DAORules(
+ proposal_timelock=uint64(8),
+ soft_close_length=uint64(4),
+ attendance_required=uint64(150000),
+ pass_percentage=uint64(7500),
+ self_destruct_length=uint64(12),
+ oracle_spend_delay=uint64(5),
+ proposal_minimum_amount=uint64(1),
+ )
+ current_innerpuz = dao_wallet_0.dao_info.current_treasury_innerpuz
+ assert current_innerpuz is not None
+ update_inner = await generate_update_proposal_innerpuz(current_innerpuz, new_dao_rules)
+ proposal_tx = await dao_wallet_0.generate_new_proposal(update_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, fee=base_fee)
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert len(dao_wallet_0.dao_info.proposals_list) == 3
+ prop_2 = dao_wallet_0.dao_info.proposals_list[2]
+
+ # Proposal 3: Spend xch to wallet_2 (this prop will close as failed)
+ proposal_amount_2 = uint64(500)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id, [recipient_puzzle_hash], [proposal_amount_2], [None]
+ )
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, fee=base_fee
+ )
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert len(dao_wallet_0.dao_info.proposals_list) == 4
+ prop_3 = dao_wallet_0.dao_info.proposals_list[3]
+
+ # Proposal 4: Create a 'bad' proposal (can't be executed, must be force-closed)
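+    # Use a dummy inner puzzle that cannot be recognised as a valid proposal type, forcing a self-destruct close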
+ xch_proposal_inner = Program.to(["x"])
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, fee=base_fee
+ )
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert len(dao_wallet_0.dao_info.proposals_list) == 5
+ assert len(dao_wallet_1.dao_info.proposals_list) == 5
+    assert len(dao_wallet_2.dao_info.proposals_list) == 5
+ prop_4 = dao_wallet_0.dao_info.proposals_list[4]
+
+ # Proposal 0 Voting: wallet 1 votes yes, wallet 2 votes no. Proposal Passes
+ vote_tx_1 = await dao_wallet_1.generate_proposal_vote_spend(
+ prop_0.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx_1)
+ vote_sb_1 = vote_tx_1.spend_bundle
+ assert vote_sb_1 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_1.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_1])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ vote_tx_2 = await dao_wallet_2.generate_proposal_vote_spend(
+ prop_0.proposal_id, dao_cat_2_bal, False, DEFAULT_TX_CONFIG
+ )
+ await wallet_2.wallet_state_manager.add_pending_transaction(vote_tx_2)
+ vote_sb_2 = vote_tx_2.spend_bundle
+ assert vote_sb_2 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_2.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_2])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ total_votes = dao_cat_0_bal + dao_cat_1_bal + dao_cat_2_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == total_votes - dao_cat_2_bal
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == total_votes - dao_cat_2_bal
+ assert dao_wallet_2.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_2.dao_info.proposals_list[0].yes_votes == total_votes - dao_cat_2_bal
+
+ prop_0_state = await dao_wallet_0.get_proposal_state(prop_0.proposal_id)
+ assert prop_0_state["passed"]
+ assert prop_0_state["closable"]
+
+ # Proposal 0 is closable, but soft_close_length has not passed.
+ close_tx_0 = await dao_wallet_0.create_proposal_close_spend(prop_0.proposal_id, DEFAULT_TX_CONFIG)
+ close_sb_0 = close_tx_0.spend_bundle
+ assert close_sb_0 is not None
+ with pytest.raises(AssertionError) as e:
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_0)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_0.name())
+ assert e.value.args[0] == "Timed assertion timed out"
+
+ for _ in range(5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Proposal 0: Close
+ close_tx_0 = await dao_wallet_0.create_proposal_close_spend(prop_0.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_0)
+ close_sb_0 = close_tx_0.spend_bundle
+ assert close_sb_0 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_0.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_0])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+ await time_out_assert(20, wallet_2.get_confirmed_balance, funds + proposal_amount_1)
+ await time_out_assert(
+ 20, dao_wallet_0.get_balance_by_asset_type, xch_funds - proposal_amount_1 + proposal_min_amt - 1
+ )
+
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_0, 0])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_1, 0])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_2, 0])
+
+ # Proposal 1 vote and close
+ vote_tx_1 = await dao_wallet_1.generate_proposal_vote_spend(
+ prop_1.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx_1)
+ vote_sb_1 = vote_tx_1.spend_bundle
+ assert vote_sb_1 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_1.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_1])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ for _ in range(10):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ prop_1_state = await dao_wallet_0.get_proposal_state(prop_1.proposal_id)
+ assert prop_1_state["passed"]
+ assert prop_1_state["closable"]
+
+ close_tx_1 = await dao_wallet_0.create_proposal_close_spend(prop_1.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_1)
+ close_sb_1 = close_tx_1.spend_bundle
+ assert close_sb_1 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_1.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_1])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ await time_out_assert(20, cat_wallet_2.get_confirmed_balance, new_mint_amount)
+
+ # Proposal 2 vote and close
+ vote_tx_2 = await dao_wallet_1.generate_proposal_vote_spend(
+ prop_2.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx_2)
+ vote_sb_2 = vote_tx_2.spend_bundle
+ assert vote_sb_2 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_2.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_2])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ for _ in range(10):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ prop_2_state = await dao_wallet_0.get_proposal_state(prop_2.proposal_id)
+ assert prop_2_state["passed"]
+ assert prop_2_state["closable"]
+
+ close_tx_2 = await dao_wallet_0.create_proposal_close_spend(prop_2.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_2)
+ close_sb_2 = close_tx_2.spend_bundle
+ assert close_sb_2 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_2.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_2])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ assert dao_wallet_0.dao_rules == new_dao_rules
+ assert dao_wallet_1.dao_rules == new_dao_rules
+ assert dao_wallet_2.dao_rules == new_dao_rules
+
+ # Proposal 3 - Close as FAILED
+ vote_tx_3 = await dao_wallet_1.generate_proposal_vote_spend(
+ prop_3.proposal_id, dao_cat_1_bal, False, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx_3)
+ vote_sb_3 = vote_tx_3.spend_bundle
+ assert vote_sb_3 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_3.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_3])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ for _ in range(10):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ prop_3_state = await dao_wallet_1.get_proposal_state(prop_3.proposal_id)
+ assert not prop_3_state["passed"]
+ assert prop_3_state["closable"]
+
+ close_tx_3 = await dao_wallet_0.create_proposal_close_spend(prop_3.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_3)
+ close_sb_3 = close_tx_3.spend_bundle
+ assert close_sb_3 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_3.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_3])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ await time_out_assert(20, wallet_2.get_confirmed_balance, funds + proposal_amount_1)
+ expected_balance = xch_funds - proposal_amount_1 + (3 * proposal_min_amt) - 3 - new_mint_amount
+ await time_out_assert(20, dao_wallet_0.get_balance_by_asset_type, expected_balance)
+
+ await time_out_assert(20, get_proposal_state, (False, True), *[dao_wallet_0, 3])
+ await time_out_assert(20, get_proposal_state, (False, True), *[dao_wallet_1, 3])
+ await time_out_assert(20, get_proposal_state, (False, True), *[dao_wallet_2, 3])
+
+ # Proposal 4 - Self Destruct a broken proposal
+ vote_tx_4 = await dao_wallet_1.generate_proposal_vote_spend(
+ prop_4.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx_4)
+ vote_sb_4 = vote_tx_4.spend_bundle
+ assert vote_sb_4 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb_4.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb_4])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ for _ in range(10):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ prop_4_state = await dao_wallet_1.get_proposal_state(prop_4.proposal_id)
+ assert prop_4_state["passed"]
+ assert prop_4_state["closable"]
+
+ with pytest.raises(Exception) as e_info:
+ close_tx_4 = await dao_wallet_0.create_proposal_close_spend(prop_4.proposal_id, DEFAULT_TX_CONFIG)
+ assert e_info.value.args[0] == "Unrecognised proposal type"
+
+ close_tx_4 = await dao_wallet_0.create_proposal_close_spend(
+ prop_4.proposal_id, DEFAULT_TX_CONFIG, self_destruct=True
+ )
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx_4)
+ close_sb_4 = close_tx_4.spend_bundle
+ assert close_sb_4 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb_4.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb_4])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=30)
+
+ # expected balance is unchanged because broken props can't release their amount
+ await time_out_assert(20, dao_wallet_0.get_balance_by_asset_type, expected_balance)
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_0, 4])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_1, 4])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_2, 4])
+
+ # Remove Proposals from Memory and Free up locked coins
+ await time_out_assert(20, len, 5, dao_wallet_0.dao_info.proposals_list)
+ await dao_wallet_0.clear_finished_proposals_from_memory()
+ free_tx = await dao_wallet_0.free_coins_from_finished_proposals(DEFAULT_TX_CONFIG, fee=uint64(100))
+ await wallet_0.wallet_state_manager.add_pending_transaction(free_tx)
+ free_sb = free_tx.spend_bundle
+ assert free_sb is not None
+ await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, free_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[free_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ await dao_wallet_0.clear_finished_proposals_from_memory()
+ await time_out_assert(20, len, 0, dao_wallet_0.dao_info.proposals_list)
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_proposal_partial_vote(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 1
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_node_2, server_2 = wallets[2]
+ wallet = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+ ph_2 = await wallet_2.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_2.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+ wallet_node_2.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_2.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ cat_amt = 300000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(cat_amt),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ # Get the full node sim to process the wallet creation spend
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # get the cat wallets
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # get the dao_cat wallet
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # Create the other user's wallet from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+ assert dao_wallet_1.dao_info.treasury_id == treasury_id
+
+ # Create funding spends for xch
+ xch_funds = uint64(500000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(
+ xch_funds,
+ DEFAULT_TX_CONFIG,
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(funding_tx)
+ assert isinstance(funding_tx, TransactionRecord)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Check that the funding spend is recognized by both dao wallets
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+
+ # Send some dao_cats to wallet_1
+ # Get the cat wallets for wallet_1
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ dao_cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.dao_cat_wallet_id]
+ assert cat_wallet_1
+ assert dao_cat_wallet_1
+
+ cat_tx = await cat_wallet_0.generate_signed_transaction([100000], [ph_1], DEFAULT_TX_CONFIG)
+ cat_sb = cat_tx[0].spend_bundle
+ await wallet.wallet_state_manager.add_pending_transaction(cat_tx[0])
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_sb.name())
+ await full_node_api.process_transaction_records(records=cat_tx)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await time_out_assert(10, cat_wallet_1.get_spendable_balance, 100000)
+
+ # Create dao cats for voting
+ dao_cat_0_bal = await dao_cat_wallet_0.get_votable_balance()
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dao_cat_0_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Create a mint proposal
+ recipient_puzzle_hash = await cat_wallet_1.get_new_inner_hash()
+ new_mint_amount = uint64(500)
+ mint_proposal_inner = await generate_mint_proposal_innerpuz(
+ treasury_id,
+ cat_wallet_0.cat_info.limitations_program_hash,
+ new_mint_amount,
+ recipient_puzzle_hash,
+ )
+
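+    # Create the proposal with a partial vote, leaving 10 dao_cats unvoted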
+ vote_amount = dao_cat_0_bal - 10
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ mint_proposal_inner, DEFAULT_TX_CONFIG, vote_amount=vote_amount, fee=uint64(1000)
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Check the proposal is saved
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == vote_amount
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+
+ # Check that wallet_1 also finds and saved the proposal
+ assert len(dao_wallet_1.dao_info.proposals_list) == 1
+ prop = dao_wallet_1.dao_info.proposals_list[0]
+
+ # Create votable dao cats and add a new vote
+ dao_cat_1_bal = await dao_cat_wallet_1.get_votable_balance()
+ txs = await dao_cat_wallet_1.enter_dao_cat_voting_mode(dao_cat_1_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet_1.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ vote_tx = await dao_wallet_1.generate_proposal_vote_spend(
+ prop.proposal_id, dao_cat_1_bal // 2, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx)
+ vote_sb = vote_tx.spend_bundle
+ assert vote_sb is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb])
+
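+    # Farm blocks to pass the proposal timelock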
+ for i in range(1, dao_rules.proposal_timelock + 1):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ total_votes = vote_amount + dao_cat_1_bal // 2
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == total_votes
+
+    close_tx = await dao_wallet_0.create_proposal_close_spend(prop.proposal_id, DEFAULT_TX_CONFIG, fee=uint64(100))
+    await wallet.wallet_state_manager.add_pending_transaction(close_tx)
+    close_sb = close_tx.spend_bundle
+    assert close_sb is not None
+    await full_node_api.process_spend_bundles(bundles=[close_sb])
+    balance = await cat_wallet_1.get_spendable_balance()
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
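+    # Check both wallets register the updated proposal state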
+ await time_out_assert(20, get_proposal_state, (True, True), dao_wallet_0, 0)
+ await time_out_assert(20, get_proposal_state, (True, True), dao_wallet_1, 0)
+
+ await time_out_assert(20, cat_wallet_1.get_spendable_balance, balance + new_mint_amount)
+ # Can we spend the newly minted CATs?
+ old_balance = await cat_wallet_0.get_spendable_balance()
+ ph_0 = await cat_wallet_0.get_new_inner_hash()
+ cat_tx = await cat_wallet_1.generate_signed_transaction([balance + new_mint_amount], [ph_0], DEFAULT_TX_CONFIG)
+ cat_sb = cat_tx[0].spend_bundle
+ await wallet_1.wallet_state_manager.add_pending_transaction(cat_tx[0])
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_sb.name())
+ await full_node_api.process_transaction_records(records=cat_tx)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, cat_wallet_1.get_spendable_balance, 0)
+ await time_out_assert(20, cat_wallet_0.get_spendable_balance, old_balance + balance + new_mint_amount)
+ # release coins
+ await dao_wallet_0.free_coins_from_finished_proposals(DEFAULT_TX_CONFIG)
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_rpc_api(
+ self_hostname: str, two_wallet_nodes: Any, trusted: Any, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 2 # use 2 here so the test doesn't become flaky if things get slow
+ full_nodes, wallets, _ = two_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
+ )
+
+ await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
+ await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
+ await time_out_assert(30, wallet_node_0.wallet_state_manager.synced, True)
+ api_0 = WalletRpcApi(wallet_node_0)
+ api_1 = WalletRpcApi(wallet_node_1)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ cat_amt = 300000
+ fee = 10000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+
+ # Try to create a DAO without rules
+ with pytest.raises(ValueError) as e_info:
+ dao_wallet_0 = await api_0.create_new_wallet(
+ dict(
+ wallet_type="dao_wallet",
+ name="DAO WALLET 1",
+ mode="new",
+ amount_of_cats=cat_amt,
+ filter_amount=1,
+ fee=fee,
+ )
+ )
+ assert e_info.value.args[0] == "DAO rules must be specified for wallet creation"
+
+ dao_wallet_0 = await api_0.create_new_wallet(
+ dict(
+ wallet_type="dao_wallet",
+ name="DAO WALLET 1",
+ mode="new",
+ dao_rules=dao_rules,
+ amount_of_cats=cat_amt,
+ filter_amount=1,
+ fee=fee,
+ )
+ )
+ assert isinstance(dao_wallet_0, dict)
+ assert dao_wallet_0.get("success")
+ dao_wallet_0_id = dao_wallet_0["wallet_id"]
+ dao_cat_wallet_0_id = dao_wallet_0["cat_wallet_id"]
+ treasury_id = bytes32(dao_wallet_0["treasury_id"])
+ spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(dao_wallet_0_id)
+ spend_bundle = spend_bundle_list[0].spend_bundle
+ await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
+
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
+ expected_xch = funds - 1 - cat_amt - fee
+ await time_out_assert(30, wallet_0.get_confirmed_balance, expected_xch)
+
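+    # Create a DAO wallet on the second node that tracks the existing treasury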
+ dao_wallet_1 = await api_1.create_new_wallet(
+ dict(
+ wallet_type="dao_wallet",
+ name="DAO WALLET 2",
+ mode="existing",
+ treasury_id=treasury_id.hex(),
+ filter_amount=1,
+ )
+ )
+ assert isinstance(dao_wallet_1, dict)
+ assert dao_wallet_1.get("success")
+ dao_wallet_1_id = dao_wallet_1["wallet_id"]
+ # Create a cat wallet and add funds to treasury
+ new_cat_amt = 1000000000000
+ cat_wallet_0 = await api_0.create_new_wallet(
+ dict(
+ wallet_type="cat_wallet",
+ name="CAT WALLET 1",
+ test=True,
+ mode="new",
+ amount=new_cat_amt,
+ )
+ )
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ cat_wallet_0_id = cat_wallet_0["wallet_id"]
+ cat_id = bytes32.from_hexstr(cat_wallet_0["asset_id"])
+
+ await rpc_state(
+ 20,
+ api_0.get_wallet_balance,
+ [{"wallet_id": cat_wallet_0_id}],
+ lambda x: x["wallet_balance"]["confirmed_wallet_balance"],
+ new_cat_amt,
+ )
+
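+    # Add funds to the treasury: first the new CAT, then XCH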
+ cat_funding_amt = 500000
+ await api_0.dao_add_funds_to_treasury(
+ dict(
+ wallet_id=dao_wallet_0_id,
+ amount=cat_funding_amt,
+ funding_wallet_id=cat_wallet_0_id,
+ )
+ )
+
+ xch_funding_amt = 200000
+ await api_0.dao_add_funds_to_treasury(
+ dict(
+ wallet_id=dao_wallet_0_id,
+ amount=xch_funding_amt,
+ funding_wallet_id=1,
+ )
+ )
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ expected_xch -= xch_funding_amt + new_cat_amt
+ await time_out_assert(30, wallet_0.get_confirmed_balance, expected_xch)
+
+ await rpc_state(
+ 20,
+ api_0.get_wallet_balance,
+ [{"wallet_id": cat_wallet_0_id}],
+ lambda x: x["wallet_balance"]["confirmed_wallet_balance"],
+ new_cat_amt - cat_funding_amt,
+ )
+
+ balances = await api_1.dao_get_treasury_balance({"wallet_id": dao_wallet_1_id})
+ assert balances["balances"]["xch"] == xch_funding_amt
+ assert balances["balances"][cat_id.hex()] == cat_funding_amt
+
+ # Send some cats to wallet_1
+ await api_0.cat_spend(
+ {
+ "wallet_id": dao_cat_wallet_0_id,
+ "amount": cat_amt // 2,
+ "inner_address": encode_puzzle_hash(ph_1, "xch"),
+ "fee": fee,
+ }
+ )
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ await rpc_state(
+ 20,
+ api_0.get_wallet_balance,
+ [{"wallet_id": dao_cat_wallet_0_id}],
+ lambda x: x["wallet_balance"]["confirmed_wallet_balance"],
+ cat_amt // 2,
+ )
+
+ # send cats to lockup
+ await api_0.dao_send_to_lockup({"wallet_id": dao_wallet_0_id, "amount": cat_amt // 2})
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ await api_1.dao_send_to_lockup({"wallet_id": dao_wallet_1_id, "amount": cat_amt // 2})
+ tx_queue = await wallet_node_1.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # create a spend proposal
+ additions = [
+ {"puzzle_hash": ph_1.hex(), "amount": 1000},
+ ]
+ create_proposal = await api_0.dao_create_proposal(
+ {
+ "wallet_id": dao_wallet_0_id,
+ "proposal_type": "spend",
+ "additions": additions,
+ "vote_amount": cat_amt // 2,
+ "fee": fee,
+ }
+ )
+ assert create_proposal["success"]
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: len(x["proposals"]), 1)
+
+ await rpc_state(20, api_1.dao_get_proposals, [{"wallet_id": dao_wallet_1_id}], lambda x: len(x["proposals"]), 1)
+
+ props_0 = await api_0.dao_get_proposals({"wallet_id": dao_wallet_0_id})
+ prop = props_0["proposals"][0]
+ assert prop.amount_voted == cat_amt // 2
+ assert prop.yes_votes == cat_amt // 2
+
+ state = await api_0.dao_get_proposal_state({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ assert state["state"]["passed"]
+ assert not state["state"]["closable"]
+
+ # Add votes
+ await api_1.dao_vote_on_proposal(
+ {
+ "wallet_id": dao_wallet_1_id,
+ "vote_amount": cat_amt // 2,
+ "proposal_id": prop.proposal_id.hex(),
+ "is_yes_vote": True,
+ }
+ )
+ tx_queue = await wallet_node_1.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][0].amount_voted, cat_amt
+ )
+
+ # farm blocks until we can close proposal
+ for _ in range(1, state["state"]["blocks_needed"]):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closable"],
+ True,
+ )
+
+ await api_0.dao_close_proposal({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][0].closed, True
+ )
+
+ # check that the proposal state has changed for everyone
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ await rpc_state(
+ 20,
+ api_1.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_1_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ # create a mint proposal
+ mint_proposal = await api_0.dao_create_proposal(
+ {
+ "wallet_id": dao_wallet_0_id,
+ "proposal_type": "mint",
+ "amount": uint64(10000),
+ "cat_target_address": encode_puzzle_hash(ph_0, "xch"),
+ "vote_amount": cat_amt // 2,
+ "fee": fee,
+ }
+ )
+ assert mint_proposal["success"]
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: len(x["proposals"]), 2)
+
+ await rpc_state(20, api_1.dao_get_proposals, [{"wallet_id": dao_wallet_1_id}], lambda x: len(x["proposals"]), 2)
+
+ props = await api_0.dao_get_proposals({"wallet_id": dao_wallet_0_id})
+ prop = props["proposals"][1]
+ assert prop.amount_voted == cat_amt // 2
+ assert prop.yes_votes == cat_amt // 2
+
+ state = await api_0.dao_get_proposal_state({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ assert state["state"]["passed"]
+ assert not state["state"]["closable"]
+
+ # Add votes
+ await api_1.dao_vote_on_proposal(
+ {
+ "wallet_id": dao_wallet_1_id,
+ "vote_amount": cat_amt // 2,
+ "proposal_id": prop.proposal_id.hex(),
+ "is_yes_vote": True,
+ }
+ )
+ tx_queue = await wallet_node_1.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][1].amount_voted, cat_amt
+ )
+
+ # farm blocks until we can close proposal
+ for _ in range(1, state["state"]["blocks_needed"]):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closable"],
+ True,
+ )
+
+ await api_0.dao_close_proposal({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][1].closed, True
+ )
+
+ # check that the proposal state has changed for everyone
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ await rpc_state(
+ 20,
+ api_1.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_1_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ # Check the minted cats are received
+ await rpc_state(
+ 20,
+ api_0.get_wallet_balance,
+ [{"wallet_id": dao_cat_wallet_0_id}],
+ lambda x: x["wallet_balance"]["confirmed_wallet_balance"],
+ 10000,
+ )
+
+ # create an update proposal
+ new_dao_rules = {"pass_percentage": 10000}
+ update_proposal = await api_0.dao_create_proposal(
+ {
+ "wallet_id": dao_wallet_0_id,
+ "proposal_type": "update",
+ "new_dao_rules": new_dao_rules,
+ "vote_amount": cat_amt // 2,
+ "fee": fee,
+ }
+ )
+ assert update_proposal["success"]
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: len(x["proposals"]), 3)
+
+ await rpc_state(20, api_1.dao_get_proposals, [{"wallet_id": dao_wallet_1_id}], lambda x: len(x["proposals"]), 3)
+
+ props = await api_0.dao_get_proposals({"wallet_id": dao_wallet_0_id})
+ prop = props["proposals"][2]
+ assert prop.amount_voted == cat_amt // 2
+ assert prop.yes_votes == cat_amt // 2
+
+ state = await api_0.dao_get_proposal_state({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ assert state["state"]["passed"]
+ assert not state["state"]["closable"]
+
+ # Add votes
+ await api_1.dao_vote_on_proposal(
+ {
+ "wallet_id": dao_wallet_1_id,
+ "vote_amount": cat_amt // 2,
+ "proposal_id": prop.proposal_id.hex(),
+ "is_yes_vote": True,
+ }
+ )
+ tx_queue = await wallet_node_1.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][2].amount_voted, cat_amt
+ )
+
+ # farm blocks until we can close proposal
+ for _ in range(1, state["state"]["blocks_needed"]):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closable"],
+ True,
+ )
+
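+    # Only the update proposal should still be open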
+ open_props = await api_0.dao_get_proposals({"wallet_id": dao_wallet_0_id, "include_closed": False})
+ assert len(open_props["proposals"]) == 1
+
+ await api_0.dao_close_proposal({"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()})
+ tx_queue = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ await full_node_api.process_transaction_records(records=[tx for tx in tx_queue])
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(
+        20, api_0.dao_get_proposals, [{"wallet_id": dao_wallet_0_id}], lambda x: x["proposals"][2].closed, True
+ )
+
+ # check that the proposal state has changed for everyone
+ await rpc_state(
+ 20,
+ api_0.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_0_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ await rpc_state(
+ 20,
+ api_1.dao_get_proposal_state,
+ [{"wallet_id": dao_wallet_1_id, "proposal_id": prop.proposal_id.hex()}],
+ lambda x: x["state"]["closed"],
+ True,
+ )
+
+ # Check the rules have updated
+ dao_wallet = wallet_node_0.wallet_state_manager.wallets[dao_wallet_0_id]
+ assert dao_wallet.dao_rules.pass_percentage == 10000
+
+ # Test adjust filter level
+ resp = await api_0.dao_adjust_filter_level({"wallet_id": dao_wallet_1_id, "filter_level": 101})
+ assert resp["success"]
+ assert resp["dao_info"].filter_below_vote_amount == 101
+
+ # Test get_treasury_id
+ resp = await api_0.dao_get_treasury_id({"wallet_id": dao_wallet_0_id})
+ assert resp["treasury_id"] == treasury_id
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_rpc_client(
+ two_wallet_nodes_services: SimulatorsAndWalletsServices,
+ trusted: bool,
+ self_hostname: str,
+ consensus_mode: ConsensusMode,
+) -> None:
+ num_blocks = 3
+ [full_node_service], wallet_services, bt = two_wallet_nodes_services
+ full_node_api = full_node_service._api
+ full_node_server = full_node_api.full_node.server
+ wallet_node_0 = wallet_services[0]._node
+ server_0 = wallet_node_0.server
+ wallet_node_1 = wallet_services[1]._node
+ server_1 = wallet_node_1.server
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ wallet_node_1.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ initial_funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
+ )
+
+ await time_out_assert(15, wallet_0.get_confirmed_balance, initial_funds)
+ await time_out_assert(15, wallet_0.get_unconfirmed_balance, initial_funds)
+
+ assert wallet_services[0].rpc_server is not None
+ assert wallet_services[1].rpc_server is not None
+
+ client_0 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[0].rpc_server.listen_port,
+ wallet_services[0].root_path,
+ wallet_services[0].config,
+ )
+ await validate_get_routes(client_0, wallet_services[0].rpc_server.rpc_api)
+ client_1 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[1].rpc_server.listen_port,
+ wallet_services[1].root_path,
+ wallet_services[1].config,
+ )
+ await validate_get_routes(client_1, wallet_services[1].rpc_server.rpc_api)
+
+ try:
+ cat_amt = uint64(150000)
+ amount_of_cats = uint64(cat_amt * 2)
+ dao_rules = DAORules(
+ proposal_timelock=uint64(8),
+ soft_close_length=uint64(4),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(4900), # 49%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+ filter_amount = uint64(1)
+ fee = uint64(10000)
+
+ # create new dao
+ dao_wallet_dict_0 = await client_0.create_new_dao_wallet(
+ mode="new",
+ tx_config=DEFAULT_TX_CONFIG,
+ dao_rules=dao_rules.to_json_dict(),
+ amount_of_cats=amount_of_cats,
+ filter_amount=filter_amount,
+ name="DAO WALLET 0",
+ )
+ assert dao_wallet_dict_0["success"]
+ dao_id_0 = dao_wallet_dict_0["wallet_id"]
+ treasury_id_hex = dao_wallet_dict_0["treasury_id"]
+ cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, amount_of_cats)
+
+ # Create a new standard cat for treasury funds
+ new_cat_amt = uint64(100000)
+        new_cat_wallet_res = await client_0.create_new_cat_and_wallet(new_cat_amt, test=True)
+        new_cat_wallet_id = new_cat_wallet_res["wallet_id"]
+ new_cat_wallet = wallet_node_0.wallet_state_manager.wallets[new_cat_wallet_id]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # join dao
+ dao_wallet_dict_1 = await client_1.create_new_dao_wallet(
+ mode="existing",
+ tx_config=DEFAULT_TX_CONFIG,
+ treasury_id=treasury_id_hex,
+ filter_amount=filter_amount,
+ name="DAO WALLET 1",
+ )
+ assert dao_wallet_dict_1["success"]
+ dao_id_1 = dao_wallet_dict_1["wallet_id"]
+ cat_wallet_1 = wallet_node_1.wallet_state_manager.wallets[dao_wallet_dict_1["cat_wallet_id"]]
+
+ # fund treasury
+ xch_funds = uint64(10000000000)
+ funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, xch_funds, DEFAULT_TX_CONFIG)
+ cat_funding_tx = await client_0.dao_add_funds_to_treasury(
+ dao_id_0, new_cat_wallet_id, new_cat_amt, DEFAULT_TX_CONFIG
+ )
+ assert funding_tx["success"]
+ assert cat_funding_tx["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, client_0.dao_get_treasury_balance, [dao_id_0], lambda x: x["balances"]["xch"], xch_funds)
+ assert isinstance(new_cat_wallet, CATWallet)
+ new_cat_asset_id = new_cat_wallet.cat_info.limitations_program_hash
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id.hex()],
+ new_cat_amt,
+ )
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"]["xch"],
+ xch_funds,
+ )
+
+ # send cats to wallet 1
+ await client_0.cat_spend(
+ wallet_id=dao_wallet_dict_0["cat_wallet_id"],
+ tx_config=DEFAULT_TX_CONFIG,
+ amount=cat_amt,
+ inner_address=encode_puzzle_hash(ph_1, "xch"),
+ fee=fee,
+ )
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, cat_amt)
+ await time_out_assert(20, cat_wallet_1.get_confirmed_balance, cat_amt)
+
+ # send cats to lockup
+ lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG)
+ lockup_1 = await client_1.dao_send_to_lockup(dao_id_1, cat_amt, DEFAULT_TX_CONFIG)
+ assert lockup_0["success"]
+ assert lockup_1["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+
+ # create a spend proposal
+ additions = [
+ {"puzzle_hash": ph_0.hex(), "amount": 1000},
+ {"puzzle_hash": ph_0.hex(), "amount": 10000, "asset_id": new_cat_asset_id.hex()},
+ ]
+ proposal = await client_0.dao_create_proposal(
+ wallet_id=dao_id_0,
+ proposal_type="spend",
+ tx_config=DEFAULT_TX_CONFIG,
+ additions=additions,
+ vote_amount=cat_amt,
+ fee=fee,
+ )
+ assert proposal["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # check proposal is found by wallet 1
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][0]["yes_votes"], cat_amt)
+ props = await client_1.dao_get_proposals(dao_id_1)
+ proposal_id_hex = props["proposals"][0]["proposal_id"]
+
+ # create an update proposal
+ update_proposal = await client_1.dao_create_proposal(
+ wallet_id=dao_id_1,
+ proposal_type="update",
+ tx_config=DEFAULT_TX_CONFIG,
+ vote_amount=cat_amt,
+ new_dao_rules={"proposal_timelock": uint64(10)},
+ fee=fee,
+ )
+ assert update_proposal["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # create a mint proposal
+ mint_addr = await client_1.get_next_address(wallet_id=wallet_1.id(), new_address=False)
+ mint_proposal = await client_1.dao_create_proposal(
+ wallet_id=dao_id_1,
+ proposal_type="mint",
+ tx_config=DEFAULT_TX_CONFIG,
+ vote_amount=cat_amt,
+ amount=uint64(100),
+ cat_target_address=mint_addr,
+ fee=fee,
+ )
+ assert mint_proposal["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # vote spend
+ vote = await client_1.dao_vote_on_proposal(
+ wallet_id=dao_id_1,
+ proposal_id=proposal_id_hex,
+ vote_amount=cat_amt,
+ tx_config=DEFAULT_TX_CONFIG,
+ is_yes_vote=True,
+ fee=fee,
+ )
+ assert vote["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # check updated proposal is found by wallet 0
+ await rpc_state(
+ 20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][0]["yes_votes"], cat_amt * 2
+ )
+
+ # check proposal state and farm enough blocks to pass
+ state = await client_0.dao_get_proposal_state(wallet_id=dao_id_0, proposal_id=proposal_id_hex)
+ assert state["success"]
+ assert state["state"]["passed"]
+
+ for _ in range(0, state["state"]["blocks_needed"]):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ state = await client_0.dao_get_proposal_state(wallet_id=dao_id_0, proposal_id=proposal_id_hex)
+ assert state["success"]
+ assert state["state"]["closable"]
+
+ # check proposal parsing
+ props = await client_0.dao_get_proposals(dao_id_0)
+ proposal_2_hex = props["proposals"][1]["proposal_id"]
+ proposal_3_hex = props["proposals"][2]["proposal_id"]
+ parsed_1 = await client_0.dao_parse_proposal(wallet_id=dao_id_0, proposal_id=proposal_id_hex)
+ assert parsed_1["success"]
+ parsed_2 = await client_0.dao_parse_proposal(wallet_id=dao_id_0, proposal_id=proposal_2_hex)
+ assert parsed_2["success"]
+ parsed_3 = await client_0.dao_parse_proposal(wallet_id=dao_id_0, proposal_id=proposal_3_hex)
+ assert parsed_3["success"]
+
+ # close the proposal
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee
+ )
+ assert close["success"]
+
+ for i in range(1, 10):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][0]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][0]["closed"], True)
+ # check treasury balances
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id.hex()],
+ new_cat_amt - 10000,
+ )
+ await rpc_state(
+ 20, client_0.dao_get_treasury_balance, [dao_id_0], lambda x: x["balances"]["xch"], xch_funds - 1000
+ )
+
+ # check wallet balances
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [new_cat_wallet_id], lambda x: x["confirmed_wallet_balance"], 10000
+ )
+ expected_xch = initial_funds - amount_of_cats - new_cat_amt - xch_funds - (2 * fee) - 2 - 9000
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [wallet_0.id()], lambda x: x["confirmed_wallet_balance"], expected_xch
+ )
+
+ # close the mint proposal
+ props = await client_0.dao_get_proposals(dao_id_0)
+ proposal_id_hex = props["proposals"][2]["proposal_id"]
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][2]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][2]["closed"], True)
+
+ # check minted cats are received
+ await rpc_state(
+ 20,
+ client_1.get_wallet_balance,
+ [dao_wallet_dict_1["cat_wallet_id"]],
+ lambda x: x["confirmed_wallet_balance"],
+ 100,
+ )
+
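+        # Only the update proposal should still be open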
+ open_props = await client_0.dao_get_proposals(dao_id_0, False)
+ assert len(open_props["proposals"]) == 1
+
+ # close the update proposal
+ proposal_id_hex = props["proposals"][1]["proposal_id"]
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][1]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][1]["closed"], True)
+
+ # check dao rules are updated
+ new_rules = await client_0.dao_get_rules(dao_id_0)
+ assert new_rules["rules"]["proposal_timelock"] == 10
+ new_rules_1 = await client_0.dao_get_rules(dao_id_1)
+ assert new_rules_1["rules"]["proposal_timelock"] == 10
+
+ # free locked cats from finished proposal
+ free_coins_res = await client_0.dao_free_coins_from_finished_proposals(
+ wallet_id=dao_id_0, tx_config=DEFAULT_TX_CONFIG
+ )
+ assert free_coins_res["success"]
+ free_coins_tx = TransactionRecord.from_json_dict(free_coins_res["tx"])
+ sb = free_coins_tx.spend_bundle
+ assert sb is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ bal = await client_0.get_wallet_balance(dao_wallet_dict_0["dao_cat_wallet_id"])
+ assert bal["confirmed_wallet_balance"] == cat_amt
+
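+        # Exit the dao_cat voting lockup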
+ exit = await client_0.dao_exit_lockup(dao_id_0, tx_config=DEFAULT_TX_CONFIG)
+ assert exit["success"]
+
+ # coverage tests for filter amount and get treasury id
+ treasury_id_resp = await client_0.dao_get_treasury_id(wallet_id=dao_id_0)
+ assert treasury_id_resp["treasury_id"] == treasury_id_hex
+ filter_amount_resp = await client_0.dao_adjust_filter_level(wallet_id=dao_id_0, filter_level=30)
+ assert filter_amount_resp["dao_info"]["filter_below_vote_amount"] == 30
+
+ finally:
+ client_0.close()
+ client_1.close()
+ await client_0.await_closed()
+ await client_1.await_closed()
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_complex_spends(
+ two_wallet_nodes_services: SimulatorsAndWalletsServices,
+ trusted: bool,
+ self_hostname: str,
+ consensus_mode: ConsensusMode,
+) -> None:
+ num_blocks = 3
+ [full_node_service], wallet_services, bt = two_wallet_nodes_services
+ full_node_api = full_node_service._api
+ full_node_server = full_node_api.full_node.server
+ wallet_node_0 = wallet_services[0]._node
+ server_0 = wallet_node_0.server
+ wallet_node_1 = wallet_services[1]._node
+ server_1 = wallet_node_1.server
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ wallet_node_1.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ initial_funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
+ )
+
+ await time_out_assert(15, wallet_0.get_confirmed_balance, initial_funds)
+ await time_out_assert(15, wallet_0.get_unconfirmed_balance, initial_funds)
+
+ assert wallet_services[0].rpc_server is not None
+ assert wallet_services[1].rpc_server is not None
+
+ client_0 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[0].rpc_server.listen_port,
+ wallet_services[0].root_path,
+ wallet_services[0].config,
+ )
+ await validate_get_routes(client_0, wallet_services[0].rpc_server.rpc_api)
+ client_1 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[1].rpc_server.listen_port,
+ wallet_services[1].root_path,
+ wallet_services[1].config,
+ )
+ await validate_get_routes(client_1, wallet_services[1].rpc_server.rpc_api)
+
+ try:
+ cat_amt = uint64(300000)
+ dao_rules = DAORules(
+ proposal_timelock=uint64(2),
+ soft_close_length=uint64(2),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(5),
+ oracle_spend_delay=uint64(2),
+ proposal_minimum_amount=uint64(1),
+ )
+ filter_amount = uint64(1)
+
+ # create new dao
+ dao_wallet_dict_0 = await client_0.create_new_dao_wallet(
+ mode="new",
+ tx_config=DEFAULT_TX_CONFIG,
+ dao_rules=dao_rules.to_json_dict(),
+ amount_of_cats=cat_amt,
+ filter_amount=filter_amount,
+ name="DAO WALLET 0",
+ )
+ assert dao_wallet_dict_0["success"]
+ dao_id_0 = dao_wallet_dict_0["wallet_id"]
+ treasury_id_hex = dao_wallet_dict_0["treasury_id"]
+ cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # Create a new standard cat for treasury funds
+ new_cat_amt = uint64(1000000)
+ new_cat_wallet_dict = await client_0.create_new_cat_and_wallet(new_cat_amt, test=True)
+ new_cat_wallet_id = new_cat_wallet_dict["wallet_id"]
+ new_cat_wallet = wallet_node_0.wallet_state_manager.wallets[new_cat_wallet_id]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Create a new standard cat for treasury funds
+ new_cat_wallet_dict_2 = await client_0.create_new_cat_and_wallet(new_cat_amt, test=True)
+ new_cat_wallet_id_2 = new_cat_wallet_dict_2["wallet_id"]
+ new_cat_wallet_2 = wallet_node_0.wallet_state_manager.wallets[new_cat_wallet_id_2]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # join dao
+ dao_wallet_dict_1 = await client_1.create_new_dao_wallet(
+ mode="existing",
+ tx_config=DEFAULT_TX_CONFIG,
+ treasury_id=treasury_id_hex,
+ filter_amount=filter_amount,
+ name="DAO WALLET 1",
+ )
+ assert dao_wallet_dict_1["success"]
+ dao_id_1 = dao_wallet_dict_1["wallet_id"]
+
+ # fund treasury so there are multiple coins for each asset
+ xch_funds = uint64(10000000000)
+        for _ in range(4):
+            funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, uint64(xch_funds // 4), DEFAULT_TX_CONFIG)
+            cat_funding_tx = await client_0.dao_add_funds_to_treasury(
+                dao_id_0, new_cat_wallet_id, uint64(new_cat_amt // 4), DEFAULT_TX_CONFIG
+            )
+            cat_funding_tx_2 = await client_0.dao_add_funds_to_treasury(
+                dao_id_0, new_cat_wallet_id_2, uint64(new_cat_amt // 4), DEFAULT_TX_CONFIG
+            )
+ assert funding_tx["success"]
+ assert cat_funding_tx["success"]
+ assert cat_funding_tx_2["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, client_0.dao_get_treasury_balance, [dao_id_0], lambda x: x["balances"]["xch"], xch_funds)
+ assert isinstance(new_cat_wallet, CATWallet)
+ new_cat_asset_id = new_cat_wallet.cat_info.limitations_program_hash
+ assert isinstance(new_cat_wallet_2, CATWallet)
+ new_cat_asset_id_2 = new_cat_wallet_2.cat_info.limitations_program_hash
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id.hex()],
+ new_cat_amt,
+ )
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id_2.hex()],
+ new_cat_amt,
+ )
+
+ # send cats to lockup
+ lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG)
+ assert lockup_0["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+
+ # Test spend proposal types
+
+ # Test proposal with multiple conditions and xch coins
+        additions = [
+            {"puzzle_hash": ph_0.hex(), "amount": xch_funds // 4},
+            {"puzzle_hash": ph_1.hex(), "amount": xch_funds // 4},
+        ]
+ proposal = await client_0.dao_create_proposal(
+ wallet_id=dao_id_0,
+ proposal_type="spend",
+ tx_config=DEFAULT_TX_CONFIG,
+ additions=additions,
+ vote_amount=cat_amt,
+ )
+ assert proposal["success"]
+ for i in range(1, 5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ props = await client_1.dao_get_proposals(dao_id_1)
+ proposal_id_hex = props["proposals"][-1]["proposal_id"]
+
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][-1]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][-1]["closed"], True)
+ # check the xch is received
+ await rpc_state(
+ 20,
+ client_1.get_wallet_balance,
+ [wallet_1.id()],
+ lambda x: x["confirmed_wallet_balance"],
+ initial_funds + (xch_funds / 4),
+ )
+
+ # Test proposal with multiple cats and multiple coins
+ cat_spend_amt = 510000
+ additions = [
+ {"puzzle_hash": ph_0.hex(), "amount": cat_spend_amt, "asset_id": new_cat_asset_id.hex()},
+ {"puzzle_hash": ph_0.hex(), "amount": cat_spend_amt, "asset_id": new_cat_asset_id_2.hex()},
+ ]
+ proposal = await client_0.dao_create_proposal(
+ wallet_id=dao_id_0,
+ proposal_type="spend",
+ tx_config=DEFAULT_TX_CONFIG,
+ additions=additions,
+ vote_amount=cat_amt,
+ )
+ assert proposal["success"]
+ for i in range(1, 5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ props = await client_1.dao_get_proposals(dao_id_1)
+ proposal_id_hex = props["proposals"][-1]["proposal_id"]
+
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][-1]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][-1]["closed"], True)
+
+ # check cat balances
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id.hex()],
+ new_cat_amt - cat_spend_amt,
+ )
+ await rpc_state(
+ 20,
+ client_0.dao_get_treasury_balance,
+ [dao_id_0],
+ lambda x: x["balances"][new_cat_asset_id_2.hex()],
+ new_cat_amt - cat_spend_amt,
+ )
+
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [new_cat_wallet_id], lambda x: x["confirmed_wallet_balance"], cat_spend_amt
+ )
+ await rpc_state(
+ 20,
+ client_0.get_wallet_balance,
+ [new_cat_wallet_id_2],
+ lambda x: x["confirmed_wallet_balance"],
+ cat_spend_amt,
+ )
+
+ # Spend remaining balances with multiple outputs
+
+ additions = [
+ {"puzzle_hash": ph_0.hex(), "amount": 400000, "asset_id": new_cat_asset_id.hex()},
+ {"puzzle_hash": ph_1.hex(), "amount": 90000, "asset_id": new_cat_asset_id.hex()},
+ {"puzzle_hash": ph_0.hex(), "amount": 400000, "asset_id": new_cat_asset_id_2.hex()},
+ {"puzzle_hash": ph_1.hex(), "amount": 90000, "asset_id": new_cat_asset_id_2.hex()},
+ {"puzzle_hash": ph_0.hex(), "amount": xch_funds / 4},
+ {"puzzle_hash": ph_1.hex(), "amount": xch_funds / 4},
+ ]
+ proposal = await client_0.dao_create_proposal(
+ wallet_id=dao_id_0,
+ proposal_type="spend",
+ tx_config=DEFAULT_TX_CONFIG,
+ additions=additions,
+ vote_amount=cat_amt,
+ )
+ assert proposal["success"]
+ for i in range(1, 5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ props = await client_1.dao_get_proposals(dao_id_1)
+ proposal_id_hex = props["proposals"][-1]["proposal_id"]
+
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0,
+ proposal_id=proposal_id_hex,
+ tx_config=DEFAULT_TX_CONFIG,
+ self_destruct=False,
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][0]["closed"], True)
+ await rpc_state(20, client_1.dao_get_proposals, [dao_id_1], lambda x: x["proposals"][0]["closed"], True)
+ # check cat balances
+ await rpc_state(
+ 20, client_0.dao_get_treasury_balance, [dao_id_0], lambda x: x["balances"][new_cat_asset_id.hex()], 0
+ )
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [new_cat_wallet_id], lambda x: x["confirmed_wallet_balance"], 0
+ )
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [new_cat_wallet_id_2], lambda x: x["confirmed_wallet_balance"], 0
+ )
+
+ # check wallet balances
+ await rpc_state(
+ 20, client_0.get_wallet_balance, [new_cat_wallet_id], lambda x: x["confirmed_wallet_balance"], cat_spend_amt
+ )
+ await rpc_state(
+ 20,
+ client_0.get_wallet_balance,
+ [new_cat_wallet_id_2],
+ lambda x: x["confirmed_wallet_balance"],
+ cat_spend_amt,
+ )
+ await rpc_state(
+ 20,
+ client_1.get_wallet_balance,
+ [wallet_1.id()],
+ lambda x: x["confirmed_wallet_balance"],
+ initial_funds + xch_funds / 4,
+ )
+
+ finally:
+ client_0.close()
+ client_1.close()
+ await client_0.await_closed()
+ await client_1.await_closed()
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_concurrency(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 3
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_node_2, server_2 = wallets[2]
+ wallet = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+ ph_2 = await wallet_2.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_2.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+ wallet_node_2.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_2.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ cat_amt = 300000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(101),
+ )
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(cat_amt),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ # Get the full node sim to process the wallet creation spend
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # get the cat wallets
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # get the dao_cat wallet
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # Create the other user's wallet from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+ assert dao_wallet_1.dao_info.treasury_id == treasury_id
+
+ # Create funding spends for xch
+ xch_funds = uint64(500000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(xch_funds, DEFAULT_TX_CONFIG)
+    await wallet.wallet_state_manager.add_pending_transaction(funding_tx)
+ assert isinstance(funding_tx, TransactionRecord)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Check that the funding spend is recognized by both dao wallets
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+
+ # Send some dao_cats to wallet_1
+ # Get the cat wallets for wallet_1
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ dao_cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.dao_cat_wallet_id]
+ assert cat_wallet_1
+ assert dao_cat_wallet_1
+
+    # Add a third wallet and check it can find the proposal with accurate vote counts
+ dao_wallet_2 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_2.wallet_state_manager,
+ wallet_2,
+ treasury_id,
+ )
+ assert dao_wallet_2 is not None
+ assert dao_wallet_2.dao_info.treasury_id == treasury_id
+
+ dao_cat_wallet_2 = dao_wallet_2.wallet_state_manager.wallets[dao_wallet_2.dao_info.dao_cat_wallet_id]
+ assert dao_cat_wallet_2
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ cat_tx = await cat_wallet_0.generate_signed_transaction([100000, 100000], [ph_1, ph_2], DEFAULT_TX_CONFIG)
+ cat_sb = cat_tx[0].spend_bundle
+ await wallet.wallet_state_manager.add_pending_transaction(cat_tx[0])
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_sb.name())
+ await full_node_api.process_transaction_records(records=cat_tx)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_1.get_confirmed_balance, 100000)
+ cat_wallet_2 = dao_wallet_2.wallet_state_manager.wallets[dao_wallet_2.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_2.get_confirmed_balance, 100000)
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, 100000)
+
+ # Create dao cats for voting
+ dao_cat_0_bal = await dao_cat_wallet_0.get_votable_balance()
+ assert dao_cat_0_bal == 100000
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dao_cat_0_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Create a proposal for xch spend
+ recipient_puzzle_hash = await wallet_2.get_new_puzzlehash()
+ proposal_amount = uint64(10000)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id,
+ [recipient_puzzle_hash],
+ [proposal_amount],
+ [None],
+ )
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, uint64(1000)
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Check the proposal is saved
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+
+    # Check that wallet_1 also finds and saves the proposal
+ assert len(dao_wallet_1.dao_info.proposals_list) == 1
+ prop = dao_wallet_1.dao_info.proposals_list[0]
+
+ # Give the wallet nodes a second
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ total_votes = dao_cat_0_bal
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == total_votes
+
+ # Create votable dao cats and add a new vote
+ dao_cat_1_bal = await dao_cat_wallet_1.get_votable_balance()
+ txs = await dao_cat_wallet_1.enter_dao_cat_voting_mode(dao_cat_1_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet_1.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+ txs = await dao_cat_wallet_2.enter_dao_cat_voting_mode(dao_cat_1_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet_2.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
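+    # Wallets 1 and 2 submit votes on the same proposal at the same time; the assertions below check how the concurrent spends resolve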
+ vote_tx = await dao_wallet_1.generate_proposal_vote_spend(prop.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG)
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx)
+ vote_sb = vote_tx.spend_bundle
+ assert vote_sb is not None
+ vote_tx_2 = await dao_wallet_2.generate_proposal_vote_spend(
+ prop.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG
+ )
+ await wallet_2.wallet_state_manager.add_pending_transaction(vote_tx_2)
+ vote_2 = vote_tx_2.spend_bundle
+ assert vote_2 is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_2.name())
+
+ await time_out_assert(20, len, 1, dao_wallet_2.dao_info.proposals_list)
+ await time_out_assert(20, int, total_votes, dao_wallet_1.dao_info.proposals_list[0].amount_voted)
+ await time_out_assert(20, int, total_votes, dao_wallet_2.dao_info.proposals_list[0].amount_voted)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, int, total_votes * 2, dao_wallet_1.dao_info.proposals_list[0].amount_voted)
+ await time_out_assert(20, int, total_votes * 2, dao_wallet_2.dao_info.proposals_list[0].amount_voted)
+ dao_cat_1_bal = await dao_cat_wallet_1.get_votable_balance(prop.proposal_id)
+ dao_cat_2_bal = await dao_cat_wallet_2.get_votable_balance(prop.proposal_id)
+
+ assert (dao_cat_1_bal == 100000 and dao_cat_2_bal == 0) or (dao_cat_1_bal == 0 and dao_cat_2_bal == 100000)
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_cat_exits(
+ two_wallet_nodes_services: SimulatorsAndWalletsServices,
+ trusted: bool,
+ self_hostname: str,
+ consensus_mode: ConsensusMode,
+) -> None:
+ num_blocks = 3 # We're using the rpc client, so use 3 blocks to ensure we stay synced
+ [full_node_service], wallet_services, bt = two_wallet_nodes_services
+ full_node_api = full_node_service._api
+ full_node_server = full_node_api.full_node.server
+ wallet_node_0 = wallet_services[0]._node
+ server_0 = wallet_node_0.server
+ wallet_node_1 = wallet_services[1]._node
+ server_1 = wallet_node_1.server
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ wallet_node_1.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ initial_funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
+ )
+
+ await time_out_assert(15, wallet_0.get_confirmed_balance, initial_funds)
+ await time_out_assert(15, wallet_0.get_unconfirmed_balance, initial_funds)
+
+ assert wallet_services[0].rpc_server is not None
+ assert wallet_services[1].rpc_server is not None
+
+ client_0 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[0].rpc_server.listen_port,
+ wallet_services[0].root_path,
+ wallet_services[0].config,
+ )
+ await validate_get_routes(client_0, wallet_services[0].rpc_server.rpc_api)
+ client_1 = await WalletRpcClient.create(
+ self_hostname,
+ wallet_services[1].rpc_server.listen_port,
+ wallet_services[1].root_path,
+ wallet_services[1].config,
+ )
+ await validate_get_routes(client_1, wallet_services[1].rpc_server.rpc_api)
+
+ try:
+ cat_amt = uint64(150000)
+ amount_of_cats = cat_amt
+ dao_rules = DAORules(
+ proposal_timelock=uint64(8),
+ soft_close_length=uint64(4),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=uint64(1),
+ )
+ filter_amount = uint64(1)
+ fee = uint64(10000)
+
+ # create new dao
+ dao_wallet_dict_0 = await client_0.create_new_dao_wallet(
+ mode="new",
+ tx_config=DEFAULT_TX_CONFIG,
+ dao_rules=dao_rules.to_json_dict(),
+ amount_of_cats=amount_of_cats,
+ filter_amount=filter_amount,
+ name="DAO WALLET 0",
+ )
+ assert dao_wallet_dict_0["success"]
+ dao_id_0 = dao_wallet_dict_0["wallet_id"]
+ # treasury_id_hex = dao_wallet_dict_0["treasury_id"]
+ cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["cat_wallet_id"]]
+ dao_cat_wallet_0 = wallet_node_0.wallet_state_manager.wallets[dao_wallet_dict_0["dao_cat_wallet_id"]]
+
+ for _ in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(1)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(60, cat_wallet_0.get_confirmed_balance, amount_of_cats)
+
+ # fund treasury
+ xch_funds = uint64(10000000000)
+ funding_tx = await client_0.dao_add_funds_to_treasury(dao_id_0, 1, xch_funds, DEFAULT_TX_CONFIG)
+ assert funding_tx["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await rpc_state(20, client_0.dao_get_treasury_balance, [dao_id_0], lambda x: x["balances"]["xch"], xch_funds)
+
+ # send cats to lockup
+ lockup_0 = await client_0.dao_send_to_lockup(dao_id_0, cat_amt, DEFAULT_TX_CONFIG)
+ assert lockup_0["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await asyncio.sleep(1)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ assert isinstance(dao_cat_wallet_0, DAOCATWallet)
+ await time_out_assert(60, dao_cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # create a spend proposal
+ additions = [
+ {"puzzle_hash": ph_1.hex(), "amount": 1000},
+ ]
+ proposal = await client_0.dao_create_proposal(
+ wallet_id=dao_id_0,
+ proposal_type="spend",
+ tx_config=DEFAULT_TX_CONFIG,
+ additions=additions,
+ vote_amount=cat_amt,
+ fee=fee,
+ )
+ assert proposal["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert_not_none(20, client_0.dao_get_proposals, dao_id_0)
+ props = await client_0.dao_get_proposals(dao_id_0)
+ proposal_id_hex = props["proposals"][0]["proposal_id"]
+
+        # check the proposal state and farm enough blocks for it to become closable
+ state = await client_0.dao_get_proposal_state(wallet_id=dao_id_0, proposal_id=proposal_id_hex)
+ assert state["success"]
+ assert state["state"]["passed"]
+
+ for _ in range(0, state["state"]["blocks_needed"]):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ state = await client_0.dao_get_proposal_state(wallet_id=dao_id_0, proposal_id=proposal_id_hex)
+ assert state["success"]
+ assert state["state"]["closable"]
+
+ # close the proposal
+ close = await client_0.dao_close_proposal(
+ wallet_id=dao_id_0, proposal_id=proposal_id_hex, tx_config=DEFAULT_TX_CONFIG, self_destruct=False, fee=fee
+ )
+ assert close["success"]
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # check proposal is closed
+ await rpc_state(20, client_0.dao_get_proposals, [dao_id_0], lambda x: x["proposals"][0]["closed"], True)
+
+ # free locked cats from finished proposal
+ res = await client_0.dao_free_coins_from_finished_proposals(wallet_id=dao_id_0, tx_config=DEFAULT_TX_CONFIG)
+ assert res["success"]
+ tx = TransactionRecord.from_json_dict(res["tx"])
+ assert tx.spend_bundle is not None
+ sb_name = tx.spend_bundle.name()
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb_name)
+
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ assert isinstance(dao_cat_wallet_0, DAOCATWallet)
+ assert dao_cat_wallet_0.dao_cat_info.locked_coins[0].active_votes == []
+
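+        # With no active votes remaining, exit the lockup so the dao cats return to the standard CAT wallet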
+        exit_tx = await client_0.dao_exit_lockup(dao_id_0, DEFAULT_TX_CONFIG)
+        assert exit_tx["success"]
+ for i in range(1, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await asyncio.sleep(0.5)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, dao_cat_wallet_0.get_confirmed_balance, 0)
+ await time_out_assert(20, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ finally:
+ client_0.close()
+ client_1.close()
+ await client_0.await_closed()
+ await client_1.await_closed()
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_reorgs(
+ self_hostname: str, two_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 2
+ full_nodes, wallets, _ = two_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ ph = await wallet.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ cat_amt = 300000
+ dao_rules = DAORules(
+ proposal_timelock=uint64(5),
+ soft_close_length=uint64(2),
+ attendance_required=uint64(1000), # 10%
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(5),
+ oracle_spend_delay=uint64(2),
+ proposal_minimum_amount=uint64(101),
+ )
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet,
+ uint64(cat_amt),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ # Get the full node sim to process the wallet creation spend
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ for i in range(num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(60, dao_wallet_0.get_confirmed_balance, uint128(1))
+
+ # Test Reorg on creation
+ height = full_node_api.full_node.blockchain.get_peak_height()
+ if height is None: # pragma: no cover
+ assert False
+ await full_node_api.reorg_from_index_to_new_index(
+ ReorgProtocol(uint32(height - 2), uint32(height + 1), puzzle_hash_0, None)
+ )
+
+ assert dao_wallet_0.dao_info.current_treasury_coin
+ await time_out_assert(60, dao_wallet_0.get_confirmed_balance, uint128(1))
+
+ # get the cat wallets
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, cat_amt)
+
+ # get the dao_cat wallet
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ # Create the other user's wallet from the treasury id
+ dao_wallet_1 = await DAOWallet.create_new_dao_wallet_for_existing_dao(
+ wallet_node_1.wallet_state_manager,
+ wallet_1,
+ treasury_id,
+ )
+ assert dao_wallet_1 is not None
+ assert dao_wallet_1.dao_info.treasury_id == treasury_id
+
+ # Create funding spends for xch
+ xch_funds = uint64(500000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(
+ xch_funds,
+ DEFAULT_TX_CONFIG,
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(funding_tx)
+ assert isinstance(funding_tx, TransactionRecord)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Check that the funding spend is recognized by both dao wallets
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+ await time_out_assert(10, dao_wallet_1.get_balance_by_asset_type, xch_funds)
+
+ # Reorg funding spend
+ height = full_node_api.full_node.blockchain.get_peak_height()
+ if height is None: # pragma: no cover
+ assert False
+ await full_node_api.reorg_from_index_to_new_index(
+ ReorgProtocol(uint32(height - 1), uint32(height + 1), puzzle_hash_0, None)
+ )
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+ await time_out_assert(10, dao_wallet_1.get_balance_by_asset_type, xch_funds)
+
+ # Send some dao_cats to wallet_1
+ # Get the cat wallets for wallet_1
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ dao_cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.dao_cat_wallet_id]
+ assert cat_wallet_1
+ assert dao_cat_wallet_1
+
+ cat_tx = await cat_wallet_0.generate_signed_transaction(
+ [100000],
+ [ph_1],
+ DEFAULT_TX_CONFIG,
+ )
+ cat_sb = cat_tx[0].spend_bundle
+ await wallet.wallet_state_manager.add_pending_transaction(cat_tx[0])
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, cat_sb.name())
+ await full_node_api.process_transaction_records(records=cat_tx)
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ cat_wallet_1 = dao_wallet_1.wallet_state_manager.wallets[dao_wallet_1.dao_info.cat_wallet_id]
+ await time_out_assert(10, cat_wallet_1.get_confirmed_balance, 100000)
+
+ # Create dao cats for voting
+ dao_cat_0_bal = await dao_cat_wallet_0.get_votable_balance()
+ assert dao_cat_0_bal == 200000
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dao_cat_0_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+ await wallet.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Create a proposal for xch spend
+ recipient_puzzle_hash = await wallet.get_new_puzzlehash()
+ proposal_amount = uint64(10000)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id,
+ [recipient_puzzle_hash],
+ [proposal_amount],
+ [None],
+ )
+ proposal_tx = await dao_wallet_0.generate_new_proposal(
+ xch_proposal_inner, DEFAULT_TX_CONFIG, dao_cat_0_bal, uint64(1000)
+ )
+ await wallet.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ # Check the proposal is saved
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+
+ # Reorg proposal creation
+ height = full_node_api.full_node.blockchain.get_peak_height()
+ if height is None: # pragma: no cover
+ assert False
+ await full_node_api.reorg_from_index_to_new_index(
+ ReorgProtocol(uint32(height - 1), uint32(height + 1), puzzle_hash_0, None)
+ )
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+
+    # Check that wallet_1 also finds and saves the proposal
+ assert len(dao_wallet_1.dao_info.proposals_list) == 1
+ prop = dao_wallet_1.dao_info.proposals_list[0]
+
+ total_votes = dao_cat_0_bal
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == total_votes
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == total_votes
+
+ # Create votable dao cats and add a new vote
+ dao_cat_1_bal = await dao_cat_wallet_1.get_votable_balance()
+ txs = await dao_cat_wallet_1.enter_dao_cat_voting_mode(dao_cat_1_bal, DEFAULT_TX_CONFIG)
+ for tx in txs:
+        await wallet_1.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb.name())
+ await full_node_api.process_transaction_records(records=txs)
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ vote_tx = await dao_wallet_1.generate_proposal_vote_spend(prop.proposal_id, dao_cat_1_bal, True, DEFAULT_TX_CONFIG)
+ await wallet_1.wallet_state_manager.add_pending_transaction(vote_tx)
+ vote_sb = vote_tx.spend_bundle
+ assert vote_sb is not None
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == dao_cat_0_bal + dao_cat_1_bal
+
+ # Reorg on vote spend
+ height = full_node_api.full_node.blockchain.get_peak_height()
+ if height is None: # pragma: no cover
+ assert False
+ await full_node_api.reorg_from_index_to_new_index(
+ ReorgProtocol(uint32(height - 1), uint32(height + 1), puzzle_hash_0, None)
+ )
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_0.dao_info.proposals_list[0].yes_votes == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_1.dao_info.proposals_list[0].amount_voted == dao_cat_0_bal + dao_cat_1_bal
+ assert dao_wallet_1.dao_info.proposals_list[0].yes_votes == dao_cat_0_bal + dao_cat_1_bal
+
+ # Close proposal
+ for i in range(5):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ close_tx = await dao_wallet_0.create_proposal_close_spend(prop.proposal_id, DEFAULT_TX_CONFIG, fee=uint64(100))
+ await wallet.wallet_state_manager.add_pending_transaction(close_tx)
+ close_sb = close_tx.spend_bundle
+ assert close_sb is not None
+ await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_0, 0])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_1, 0])
+
+ # Reorg closed proposal
+ height = full_node_api.full_node.blockchain.get_peak_height()
+ if height is None: # pragma: no cover
+ assert False
+ await full_node_api.reorg_from_index_to_new_index(
+ ReorgProtocol(uint32(height - 1), uint32(height + 1), puzzle_hash_0, None)
+ )
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, timeout=30)
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_0, 0])
+ await time_out_assert(20, get_proposal_state, (True, True), *[dao_wallet_1, 0])
+
+
+@pytest.mark.limit_consensus_modes(reason="does not depend on consensus rules")
+@pytest.mark.parametrize(
+ "trusted",
+ [True, False],
+)
+@pytest.mark.asyncio
+async def test_dao_votes(
+ self_hostname: str, three_wallet_nodes: SimulatorsAndWallets, trusted: bool, consensus_mode: ConsensusMode
+) -> None:
+ num_blocks = 1
+ full_nodes, wallets, _ = three_wallet_nodes
+ full_node_api = full_nodes[0]
+ full_node_server = full_node_api.server
+ wallet_node_0, server_0 = wallets[0]
+ wallet_node_1, server_1 = wallets[1]
+ wallet_node_2, server_2 = wallets[2]
+ wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
+ wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
+ wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
+ ph_0 = await wallet_0.get_new_puzzlehash()
+ ph_1 = await wallet_1.get_new_puzzlehash()
+ ph_2 = await wallet_2.get_new_puzzlehash()
+
+ if trusted:
+ wallet_node_0.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_1.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ wallet_node_2.config["trusted_peers"] = {
+ full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
+ }
+ else:
+ wallet_node_0.config["trusted_peers"] = {}
+ wallet_node_1.config["trusted_peers"] = {}
+ wallet_node_2.config["trusted_peers"] = {}
+
+ await server_0.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_1.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+ await server_2.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None)
+
+ for i in range(0, num_blocks):
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_0))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_2))
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+
+ funds = sum(
+ [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks + 1)]
+ )
+
+ await time_out_assert(20, wallet_0.get_confirmed_balance, funds)
+ await time_out_assert(20, full_node_api.wallet_is_synced, True, wallet_node_0)
+
+ # set a standard fee amount to use in all txns
+ base_fee = uint64(100)
+
+ # set the cat issuance and DAO rules
+ cat_issuance = 300000
+ proposal_min_amt = uint64(101)
+ dao_rules = DAORules(
+ proposal_timelock=uint64(10),
+ soft_close_length=uint64(5),
+ attendance_required=uint64(190000),
+ pass_percentage=uint64(5100), # 51%
+ self_destruct_length=uint64(20),
+ oracle_spend_delay=uint64(10),
+ proposal_minimum_amount=proposal_min_amt,
+ )
+
+ dao_wallet_0 = await DAOWallet.create_new_dao_and_wallet(
+ wallet_node_0.wallet_state_manager,
+ wallet_0,
+ uint64(cat_issuance),
+ dao_rules,
+ DEFAULT_TX_CONFIG,
+ )
+ assert dao_wallet_0 is not None
+
+ tx_queue: List[TransactionRecord] = await wallet_node_0.wallet_state_manager.tx_store.get_not_sent()
+ tx_record = tx_queue[0]
+ await full_node_api.process_transaction_records(records=[tx_record])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.cat_wallet_id]
+ dao_cat_wallet_0 = dao_wallet_0.wallet_state_manager.wallets[dao_wallet_0.dao_info.dao_cat_wallet_id]
+ await time_out_assert(10, cat_wallet_0.get_confirmed_balance, cat_issuance)
+ assert dao_cat_wallet_0
+
+ treasury_id = dao_wallet_0.dao_info.treasury_id
+
+ dc_1 = uint64(100000)
+ dc_2 = uint64(50000)
+ dc_3 = uint64(30000)
+ dc_4 = uint64(20000)
+ dc_5 = uint64(10000)
+
+    # Lock up voting cats for wallet_0 in several separate coins
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dc_1, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dc_2, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs)
+
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dc_3, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dc_4, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ txs = await dao_cat_wallet_0.enter_dao_cat_voting_mode(dc_5, DEFAULT_TX_CONFIG, fee=base_fee)
+ for tx in txs:
+ await wallet_0.wallet_state_manager.add_pending_transaction(tx)
+ dao_cat_sb_0 = txs[0].spend_bundle
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, dao_cat_sb_0.name())
+ await full_node_api.process_transaction_records(records=txs)
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ await time_out_assert(10, dao_cat_wallet_0.get_confirmed_balance, dc_1 + dc_2 + dc_3 + dc_4 + dc_5)
+
+ # Create funding spend so the treasury holds some XCH
+ xch_funds = uint64(500000)
+ funding_tx = await dao_wallet_0.create_add_funds_to_treasury_spend(xch_funds, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(funding_tx)
+ funding_sb = funding_tx.spend_bundle
+ assert isinstance(funding_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, funding_sb.name())
+ await full_node_api.process_transaction_records(records=[funding_tx])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ # Check that the funding spend is recognized by all wallets
+ await time_out_assert(10, dao_wallet_0.get_balance_by_asset_type, xch_funds)
+
+ # Create Proposals
+ recipient_puzzle_hash = await wallet_2.get_new_puzzlehash()
+ proposal_amount_1 = uint64(9998)
+ xch_proposal_inner = generate_simple_proposal_innerpuz(
+ treasury_id,
+ [recipient_puzzle_hash],
+ [proposal_amount_1],
+ [None],
+ )
+
+ vote_1 = uint64(120000)
+ vote_2 = uint64(150000)
+
+ proposal_tx = await dao_wallet_0.generate_new_proposal(xch_proposal_inner, DEFAULT_TX_CONFIG, vote_1, fee=base_fee)
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ assert len(dao_wallet_0.dao_info.proposals_list) == 1
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == vote_1
+ assert dao_wallet_0.dao_info.proposals_list[0].timer_coin is not None
+ prop_0 = dao_wallet_0.dao_info.proposals_list[0]
+
+ proposal_tx = await dao_wallet_0.generate_new_proposal(xch_proposal_inner, DEFAULT_TX_CONFIG, vote_2, fee=base_fee)
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ assert len(dao_wallet_0.dao_info.proposals_list) == 2
+ assert dao_wallet_0.dao_info.proposals_list[1].amount_voted == vote_2
+
+ vote_3 = uint64(30000)
+ vote_tx = await dao_wallet_0.generate_proposal_vote_spend(prop_0.proposal_id, vote_3, True, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(vote_tx)
+ assert isinstance(vote_tx, TransactionRecord)
+ vote_sb = vote_tx.spend_bundle
+ assert isinstance(vote_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == vote_1 + vote_3
+
+ vote_4 = uint64(60000)
+ vote_tx = await dao_wallet_0.generate_proposal_vote_spend(prop_0.proposal_id, vote_4, True, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(vote_tx)
+ assert isinstance(vote_tx, TransactionRecord)
+ vote_sb = vote_tx.spend_bundle
+ assert isinstance(vote_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
+ assert dao_wallet_0.dao_info.proposals_list[0].amount_voted == vote_1 + vote_3 + vote_4
+
+ vote_5 = uint64(1)
+ proposal_tx = await dao_wallet_0.generate_new_proposal(xch_proposal_inner, DEFAULT_TX_CONFIG, vote_5, fee=base_fee)
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert len(dao_wallet_0.dao_info.proposals_list) == 3
+ assert dao_wallet_0.dao_info.proposals_list[2].amount_voted == vote_5
+ prop_2 = dao_wallet_0.dao_info.proposals_list[2]
+
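+    # Cast ten additional small votes on the third proposal so its vote total accumulates across many separate spends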
+ vote_6 = uint64(20000)
+ for i in range(10):
+ vote_tx = await dao_wallet_0.generate_proposal_vote_spend(prop_2.proposal_id, vote_6, True, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(vote_tx)
+ assert isinstance(vote_tx, TransactionRecord)
+ vote_sb = vote_tx.spend_bundle
+ assert isinstance(vote_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, vote_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[vote_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert dao_wallet_0.dao_info.proposals_list[2].amount_voted == 200001
+
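+    # Close the first proposal, which has accumulated enough yes votes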
+ close_tx = await dao_wallet_0.create_proposal_close_spend(prop_0.proposal_id, DEFAULT_TX_CONFIG)
+ await wallet_0.wallet_state_manager.add_pending_transaction(close_tx)
+ assert isinstance(close_tx, TransactionRecord)
+ close_sb = close_tx.spend_bundle
+ assert isinstance(close_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, close_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[close_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+
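+    # Create a proposal without an explicit vote amount; the assertion below shows it is voted with the full locked-up balance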
+ proposal_tx = await dao_wallet_0.generate_new_proposal(xch_proposal_inner, DEFAULT_TX_CONFIG, fee=base_fee)
+ await wallet_0.wallet_state_manager.add_pending_transaction(proposal_tx)
+ assert isinstance(proposal_tx, TransactionRecord)
+ proposal_sb = proposal_tx.spend_bundle
+ assert isinstance(proposal_sb, SpendBundle)
+ await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, proposal_sb.name())
+ await full_node_api.process_spend_bundles(bundles=[proposal_sb])
+ await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash_0))
+ await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=30)
+ assert dao_wallet_0.dao_info.proposals_list[3].amount_voted == 210000
diff --git a/tests/wallet/test_singleton_store.py b/tests/wallet/test_singleton_store.py
new file mode 100644
--- /dev/null
+++ b/tests/wallet/test_singleton_store.py
@@ -0,0 +1,152 @@
+from __future__ import annotations
+
+# import dataclasses
+from secrets import token_bytes
+
+import pytest
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.types.coin_spend import CoinSpend
+from chia.util.ints import uint32, uint64
+
+# from chia.wallet.dao_wallet.dao_wallet import DAOInfo, DAOWallet
+from chia.wallet.lineage_proof import LineageProof
+from chia.wallet.singleton import create_singleton_puzzle
+from chia.wallet.singleton_record import SingletonRecord
+from chia.wallet.wallet_singleton_store import WalletSingletonStore
+from tests.util.db_connection import DBConnection
+
+
+def get_record(wallet_id: uint32 = uint32(2)) -> SingletonRecord:
+ launcher_id = bytes32(token_bytes(32))
+ inner_puz = Program.to(1)
+ inner_puz_hash = inner_puz.get_tree_hash()
+ parent_puz = create_singleton_puzzle(inner_puz, launcher_id)
+ parent_puz_hash = parent_puz.get_tree_hash()
+ parent_coin = Coin(launcher_id, parent_puz_hash, 1)
+ inner_sol = Program.to([[51, inner_puz_hash, 1]])
+ lineage_proof = LineageProof(launcher_id, inner_puz.get_tree_hash(), uint64(1))
+ parent_sol = Program.to([lineage_proof.to_program(), 1, inner_sol])
+ parent_coinspend = CoinSpend(parent_coin, parent_puz, parent_sol)
+ pending = True
+ removed_height = 0
+ custom_data = "{'key': 'value'}"
+ record = SingletonRecord(
+ coin=parent_coin,
+ singleton_id=launcher_id,
+ wallet_id=wallet_id,
+ parent_coinspend=parent_coinspend,
+ inner_puzzle_hash=inner_puz_hash,
+ pending=pending,
+ removed_height=removed_height,
+ lineage_proof=lineage_proof,
+ custom_data=custom_data,
+ )
+ return record
+
+
+class TestSingletonStore:
+ @pytest.mark.asyncio
+ async def test_singleton_insert(self) -> None:
+ async with DBConnection(1) as wrapper:
+ db = await WalletSingletonStore.create(wrapper)
+ record = get_record()
+ await db.save_singleton(record)
+ records_by_wallet = await db.get_records_by_wallet_id(record.wallet_id)
+ assert records_by_wallet[0] == record
+ record_by_coin_id = await db.get_records_by_coin_id(record.coin.name())
+ assert record_by_coin_id[0] == record
+ records_by_singleton_id = await db.get_records_by_singleton_id(record.singleton_id)
+ assert records_by_singleton_id[0] == record
+ # update pending
+ await db.update_pending_transaction(record.coin.name(), False)
+ record_to_check = (await db.get_records_by_coin_id(record.coin.name()))[0]
+ assert record_to_check.pending is False
+ assert record_to_check.custom_data == "{'key': 'value'}"
+
+ @pytest.mark.asyncio
+ async def test_singleton_add_spend(self) -> None:
+ async with DBConnection(1) as wrapper:
+ db = await WalletSingletonStore.create(wrapper)
+ record = get_record()
+ child_coin = Coin(record.coin.name(), record.coin.puzzle_hash, 1)
+ parent_coinspend = record.parent_coinspend
+
+ # test add spend
+ await db.add_spend(uint32(2), parent_coinspend, uint32(10))
+ record_by_id = (await db.get_records_by_coin_id(child_coin.name()))[0]
+ assert record_by_id
+
+ # Test adding a non-singleton will fail
+ inner_puz = Program.to(1)
+ inner_puz_hash = inner_puz.get_tree_hash()
+ bad_coin = Coin(record.singleton_id, inner_puz_hash, 1)
+ inner_sol = Program.to([[51, inner_puz_hash, 1]])
+ bad_coinspend = CoinSpend(bad_coin, inner_puz, inner_sol)
+ with pytest.raises(RuntimeError) as e_info:
+ await db.add_spend(uint32(2), bad_coinspend, uint32(10))
+ assert e_info.value.args[0] == "Coin to add is not a valid singleton"
+
+ @pytest.mark.asyncio
+ async def test_singleton_remove(self) -> None:
+ async with DBConnection(1) as wrapper:
+ db = await WalletSingletonStore.create(wrapper)
+ record_1 = get_record()
+ record_2 = get_record()
+ await db.save_singleton(record_1)
+ await db.save_singleton(record_2)
+ resp_1 = await db.delete_singleton_by_coin_id(record_1.coin.name(), uint32(1))
+ assert resp_1
+ resp_2 = await db.delete_singleton_by_singleton_id(record_2.singleton_id, uint32(1))
+ assert resp_2
+ record = (await db.get_records_by_coin_id(record_1.coin.name()))[0]
+ assert record.removed_height == 1
+ record = (await db.get_records_by_coin_id(record_2.coin.name()))[0]
+ assert record.removed_height == 1
+ # delete a non-existing coin id
+ fake_id = bytes32(b"x" * 32)
+ resp_3 = await db.delete_singleton_by_coin_id(fake_id, uint32(10))
+ assert not resp_3
+ # delete a non-existing singleton id
+ resp_4 = await db.delete_singleton_by_singleton_id(fake_id, uint32(10))
+ assert not resp_4
+
+ @pytest.mark.asyncio
+ async def test_singleton_delete_wallet(self) -> None:
+ async with DBConnection(1) as wrapper:
+ db = await WalletSingletonStore.create(wrapper)
+ for i in range(1, 5):
+ wallet_id = uint32(i)
+ for _ in range(5):
+ record = get_record(wallet_id)
+ await db.save_singleton(record)
+ assert not (await db.is_empty(wallet_id))
+
+ for j in range(1, 5):
+ wallet_id = uint32(j)
+ start_count = await db.count()
+ await db.delete_wallet(wallet_id)
+ assert (await db.count(wallet_id)) == 0
+ assert await db.is_empty(wallet_id)
+ end_count = await db.count()
+ assert end_count == start_count - 5
+
+ assert await db.is_empty()
+
+ @pytest.mark.asyncio
+ async def test_singleton_reorg(self) -> None:
+ async with DBConnection(1) as wrapper:
+ db = await WalletSingletonStore.create(wrapper)
+ record = get_record()
+ # save the singleton
+ await db.save_singleton(record)
+ # delete it at block 10
+ await db.delete_singleton_by_coin_id(record.coin.name(), uint32(10))
+ record_by_id = (await db.get_records_by_coin_id(record.coin.name()))[0]
+ assert record_by_id.removed_height == 10
+ # rollback
+ await db.rollback(5, uint32(2))
+ reorged_record_by_id = await db.get_records_by_coin_id(record.coin.name())
+ assert not reorged_record_by_id
| DAOWallet exit_lockup exception [Bug]
### What happened?
I have a DAO without any active proposals:
`chia dao list_proposals -i 3 -f 2408648282`
```
############################
Proposals have 2 blocks of soft close time.
############################
```
The DAO's wallet has 74,000 CATs locked up:
`chia wallet show -f 2408648282`
```
Wallet height: 3256731
Sync status: Synced
Balances, fingerprint: 2408648282
Chia Wallet:
-Total Balance: 1.499558899998 txch (1499558899998 mojo)
-Pending Total Balance: 1.499558899998 txch (1499558899998 mojo)
-Spendable: 1.499558899998 txch (1499558899998 mojo)
-Type: STANDARD_WALLET
-Wallet ID: 1
Profile 1:
-Total Balance: 1.0
-Pending Total Balance: 1.0
-Spendable: 1.0
-Type: DAO
-Treasury ID: 25062337440ad9d6d9b7596df783c77986389c1ccd5f2ce13c2c11200ed0c55f
-Wallet ID: 3
CAT 713bb7e50d4bb570...:
-Total Balance: 26.0 (26000 mojo)
-Pending Total Balance: 26.0 (26000 mojo)
-Spendable: 26.0 (26000 mojo)
-Type: CAT
-Asset ID: 713bb7e50d4bb570b8bbd3267e2d010f2807e426e4711645a5239665688b18ce
-Wallet ID: 4
CAT 713bb7e50d4bb570...:
-Total Balance: 74000.0
-Pending Total Balance: 0.0
-Spendable: 0.0
-Type: DAO_CAT
-Asset ID: 00000000000000030000000000000004713bb7e50d4bb570b8bbd3267e2d010f
-Wallet ID: 5
```
I receive an error when I attempt to run the `exit_lockup` command:
`chia dao exit_lockup -i 3 -m 0.00001 -f 2408648282`
```
Exception from 'wallet' {'error': "'DAOWallet' object has no attribute 'create_tandem_xch_tx'", 'success': False, 'traceback': 'Traceback (most recent call last):\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\util.py", line 21, in inner\n res_object = await f(request_data)\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\util.py", line 82, in rpc_endpoint\n return await func(self, request, *args, tx_config=tx_config, extra_conditions=extra_conditions, **kwargs)\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\wallet_rpc_api.py", line 2718, in dao_exit_lockup\n exit_tx = await dao_cat_wallet.exit_vote_state(\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\wallet\\cat_wallet\\dao_cat_wallet.py", line 460, in exit_vote_state\n chia_tx = await dao_wallet.create_tandem_xch_tx(\nAttributeError: \'DAOWallet\' object has no attribute \'create_tandem_xch_tx\'\n'}:
Traceback (most recent call last):
File "C:\Users\User\Chia\dao-wallet\chia-blockchain\chia\cmds\cmds_util.py", line 119, in get_any_service_client
yield node_client, config
File "C:\Users\User\Chia\dao-wallet\chia-blockchain\chia\cmds\cmds_util.py", line 235, in get_wallet_client
yield wallet_client, new_fp, config
File "C:\Users\User\Chia\dao-wallet\chia-blockchain\chia\cmds\dao_funcs.py", line 423, in exit_lockup
res = await wallet_client.dao_exit_lockup(
File "C:\Users\User\Chia\dao-wallet\chia-blockchain\chia\rpc\wallet_rpc_client.py", line 1556, in dao_exit_lockup
response = await self.fetch("dao_exit_lockup", request)
File "C:\Users\User\Chia\dao-wallet\chia-blockchain\chia\rpc\rpc_client.py", line 61, in fetch
raise ValueError(res_json)
ValueError: {'error': "'DAOWallet' object has no attribute 'create_tandem_xch_tx'", 'success': False, 'traceback': 'Traceback (most recent call last):\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\util.py", line 21, in inner\n res_object = await f(request_data)\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\util.py", line 82, in rpc_endpoint\n return await func(self, request, *args, tx_config=tx_config, extra_conditions=extra_conditions, **kwargs)\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\rpc\\wallet_rpc_api.py", line 2718, in dao_exit_lockup\n exit_tx = await dao_cat_wallet.exit_vote_state(\n File "C:\\Users\\User\\Chia\\dao-wallet\\chia-blockchain\\chia\\wallet\\cat_wallet\\dao_cat_wallet.py", line 460, in exit_vote_state\n chia_tx = await dao_wallet.create_tandem_xch_tx(\nAttributeError: \'DAOWallet\' object has no attribute \'create_tandem_xch_tx\'\n'}
```
### Version
2.0.1b3.dev316 (dao-wallet branch)
### What platform are you using?
Windows
### What ui mode are you using?
CLI
### Relevant log output
_No response_
Can't create DAO with single coin [Bug]
### What happened?
When creating a DAO, the wallet must hold two coins: One to create the DAO, and one to mint the DAO CATs. This is an issue because I would expect most new DAOs to be created from a new wallet, which would receive a single coin from another wallet.
For example, the following command to create a DAO:
```
chia dao create --proposal-timelock 10 --soft-close 2 --attendance-required 1000 --pass-percentage 5000 --self-destruct 10 --oracle-delay 5 --proposal-minimum 0.000001 --filter-amount 1 --cat-amount 100000 -m 0.00001 --fee-for-cat 0.00001
```
Will fail with this error if only one coin is present:
```
Exception from 'wallet' {'error': 'Transaction for 100000 is greater than spendable balance of 0. There may be other transactions pending or our minimum coin amount is too high.', 'success': False}
```
I will document this on our website, but it would be better if either:
1. The error message could state specifically that another coin is required, or
2. (better) The DAO could automatically (or maybe prompt to) split the coin prior to creating the DAO, or
3. Create the DAO first, wait for change, and then create the DAO CATs
### Version
2.0.1b3.dev316 (dao-wallet branch)
### What platform are you using?
Windows
### What ui mode are you using?
CLI
### Relevant log output
_No response_
| 2023-10-03T15:42:30Z | [] | [] |
|
Chia-Network/chia-blockchain | 17,523 | Chia-Network__chia-blockchain-17523 | [
"16995"
] | 76bb49c4fdc4e4ba70a0964fbbebd9168b005796 | diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py
--- a/chia/rpc/wallet_rpc_api.py
+++ b/chia/rpc/wallet_rpc_api.py
@@ -483,14 +483,31 @@ async def _check_key_used_for_rewards(
return False, False
config: Dict[str, Any] = load_config(new_root, "config.yaml")
- farmer_target = config["farmer"].get("xch_target_address")
- pool_target = config["pool"].get("xch_target_address")
- address_to_check: List[bytes32] = [decode_puzzle_hash(farmer_target), decode_puzzle_hash(pool_target)]
+ farmer_target = config["farmer"].get("xch_target_address", "")
+ pool_target = config["pool"].get("xch_target_address", "")
+ address_to_check: List[bytes32] = []
+
+ try:
+ farmer_decoded = decode_puzzle_hash(farmer_target)
+ address_to_check.append(farmer_decoded)
+ except ValueError:
+ farmer_decoded = None
+
+ try:
+ pool_decoded = decode_puzzle_hash(pool_target)
+ address_to_check.append(pool_decoded)
+ except ValueError:
+ pool_decoded = None
found_addresses: Set[bytes32] = match_address_to_sk(sk, address_to_check, max_ph_to_search)
+ found_farmer = False
+ found_pool = False
+
+ if farmer_decoded is not None:
+ found_farmer = farmer_decoded in found_addresses
- found_farmer = address_to_check[0] in found_addresses
- found_pool = address_to_check[1] in found_addresses
+ if pool_decoded is not None:
+ found_pool = pool_decoded in found_addresses
return found_farmer, found_pool
| diff --git a/tests/wallet/rpc/test_wallet_rpc.py b/tests/wallet/rpc/test_wallet_rpc.py
--- a/tests/wallet/rpc/test_wallet_rpc.py
+++ b/tests/wallet/rpc/test_wallet_rpc.py
@@ -1610,6 +1610,45 @@ async def have_nfts():
}
+async def _check_delete_key(
+ client: WalletRpcClient, wallet_node: WalletNode, farmer_fp: int, pool_fp: int, observer: bool = False
+) -> None:
+ # Add in reward addresses into farmer and pool for testing delete key checks
+ # set farmer to first private key
+ create_sk = master_sk_to_wallet_sk_unhardened if observer else master_sk_to_wallet_sk
+
+ sk = await wallet_node.get_key_for_fingerprint(farmer_fp)
+ assert sk is not None
+ farmer_ph = create_puzzlehash_for_pk(create_sk(sk, uint32(0)).get_g1())
+
+ sk = await wallet_node.get_key_for_fingerprint(pool_fp)
+ assert sk is not None
+ pool_ph = create_puzzlehash_for_pk(create_sk(sk, uint32(0)).get_g1())
+
+ with lock_and_load_config(wallet_node.root_path, "config.yaml") as test_config:
+ test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(farmer_ph, "txch")
+ test_config["pool"]["xch_target_address"] = encode_puzzle_hash(pool_ph, "txch")
+ save_config(wallet_node.root_path, "config.yaml", test_config)
+
+ # Check farmer_fp key
+ sk_dict = await client.check_delete_key(farmer_fp)
+ assert sk_dict["fingerprint"] == farmer_fp
+ assert sk_dict["used_for_farmer_rewards"] is True
+ assert sk_dict["used_for_pool_rewards"] is False
+
+ # Check pool_fp key
+ sk_dict = await client.check_delete_key(pool_fp)
+ assert sk_dict["fingerprint"] == pool_fp
+ assert sk_dict["used_for_farmer_rewards"] is False
+ assert sk_dict["used_for_pool_rewards"] is True
+
+ # Check unknown key
+ sk_dict = await client.check_delete_key(123456, 10)
+ assert sk_dict["fingerprint"] == 123456
+ assert sk_dict["used_for_farmer_rewards"] is False
+ assert sk_dict["used_for_pool_rewards"] is False
+
+
@pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN, ConsensusMode.HARD_FORK_2_0], reason="save time")
@pytest.mark.anyio
async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment):
@@ -1658,67 +1697,32 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn
sk_dict = await client.get_private_key(pks[1])
assert sk_dict["fingerprint"] == pks[1]
- # Add in reward addresses into farmer and pool for testing delete key checks
- # set farmer to first private key
- sk = await wallet_node.get_key_for_fingerprint(pks[0])
- assert sk is not None
- test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
+ # test hardened keys
+ await _check_delete_key(client=client, wallet_node=wallet_node, farmer_fp=pks[0], pool_fp=pks[1], observer=False)
+
+ # test observer keys
+ await _check_delete_key(client=client, wallet_node=wallet_node, farmer_fp=pks[0], pool_fp=pks[1], observer=True)
+
+ # set farmer to empty string
with lock_and_load_config(wallet_node.root_path, "config.yaml") as test_config:
- test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
- # set pool to second private key
- sk = await wallet_node.get_key_for_fingerprint(pks[1])
- assert sk is not None
- test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(0)).get_g1())
- test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+ test_config["farmer"]["xch_target_address"] = ""
save_config(wallet_node.root_path, "config.yaml", test_config)
- # Check first key
- sk_dict = await client.check_delete_key(pks[0])
- assert sk_dict["fingerprint"] == pks[0]
- assert sk_dict["used_for_farmer_rewards"] is True
- assert sk_dict["used_for_pool_rewards"] is False
-
- # Check second key
+ # Check key
sk_dict = await client.check_delete_key(pks[1])
assert sk_dict["fingerprint"] == pks[1]
assert sk_dict["used_for_farmer_rewards"] is False
assert sk_dict["used_for_pool_rewards"] is True
- # Check unknown key
- sk_dict = await client.check_delete_key(123456, 10)
- assert sk_dict["fingerprint"] == 123456
- assert sk_dict["used_for_farmer_rewards"] is False
- assert sk_dict["used_for_pool_rewards"] is False
-
- # Add in observer reward addresses into farmer and pool for testing delete key checks
- # set farmer to first private key
- sk = await wallet_node.get_key_for_fingerprint(pks[0])
- assert sk is not None
- test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
+ # set farmer and pool to empty string
with lock_and_load_config(wallet_node.root_path, "config.yaml") as test_config:
- test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
- # set pool to second private key
- sk = await wallet_node.get_key_for_fingerprint(pks[1])
- assert sk is not None
- test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
- test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+ test_config["farmer"]["xch_target_address"] = ""
+ test_config["pool"]["xch_target_address"] = ""
save_config(wallet_node.root_path, "config.yaml", test_config)
- # Check first key
+ # Check key
sk_dict = await client.check_delete_key(pks[0])
assert sk_dict["fingerprint"] == pks[0]
- assert sk_dict["used_for_farmer_rewards"] is True
- assert sk_dict["used_for_pool_rewards"] is False
-
- # Check second key
- sk_dict = await client.check_delete_key(pks[1])
- assert sk_dict["fingerprint"] == pks[1]
- assert sk_dict["used_for_farmer_rewards"] is False
- assert sk_dict["used_for_pool_rewards"] is True
-
- # Check unknown key
- sk_dict = await client.check_delete_key(123456, 10)
- assert sk_dict["fingerprint"] == 123456
assert sk_dict["used_for_farmer_rewards"] is False
assert sk_dict["used_for_pool_rewards"] is False
| Delete key from GUI fails [Bug]
### What happened?
Delete key from the GUI fails, but it succeeds from the CLI.
To recreate:
- create a new key with the GUI
- "logout" to show the `wallet keys` diaglog
- Click the three vertical dots on the new key, and click `Delete`
A new spinner appears, but the key is not deleted. A `WARNING` message appears in the log (see below).
I'm seeing the same behavior on Windows and Linux. The wallet is synced when I attempt to delete it. It doesn't seem to matter whether the node is synced.
The stack trace is from 2.0.1, though I'm also seeing it in 2.1.2-rc2.
### Version
2.0.1
### What platform are you using?
Windows
### What ui mode are you using?
GUI
### Relevant log output
```shell
2023-12-05T13:35:19.539 wallet chia.rpc.rpc_server : WARNING Error while handling message: Traceback (most recent call last):
File "chia/rpc/rpc_server.py", line 340, in safe_handle
File "chia/rpc/rpc_server.py", line 331, in ws_api
File "chia/rpc/wallet_rpc_api.py", line 462, in check_delete_key
File "chia/rpc/wallet_rpc_api.py", line 440, in _check_key_used_for_rewards
File "chia/util/bech32m.py", line 120, in decode_puzzle_hash
ValueError: Invalid Address
```
| @danieljperry while the spinner is visible, the GUI is asking the wallet to check if the wallet-to-be-deleted is used for farming or pool rewards. The code in question is:
```
farmer_target = config["farmer"].get("xch_target_address")
pool_target = config["pool"].get("xch_target_address")
address_to_check: List[bytes32] = [decode_puzzle_hash(farmer_target), decode_puzzle_hash(pool_target)]
```
Can you check if your config has valid values for the farmer/pool `xch_target_address`?
Thanks, that was the issue. Both instances of the target address were blank:
`xch_target_address: ''`
When I added a real address, I could delete the key. However, the UX issue still remains. If I am just running a wallet and am not farming, I cannot delete a key from the GUI unless I add a dummy address to my config.yaml. Before the `address_to_check` line, would it be possible to check whether the `farmer_target` and `pool_target` are valid addresses?
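A minimal sketch of that guard (mirroring the fix in the patch above): treat a blank or malformed `xch_target_address` as "not set" instead of letting `decode_puzzle_hash` raise.
```python
# Sketch only. `config` is the loaded config.yaml dict, as in _check_key_used_for_rewards.
from typing import List, Optional

from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash


def safe_decode(address: str) -> Optional[bytes32]:
    try:
        return decode_puzzle_hash(address)
    except ValueError:
        return None  # covers xch_target_address: '' and malformed values


farmer_decoded = safe_decode(config["farmer"].get("xch_target_address", ""))
pool_decoded = safe_decode(config["pool"].get("xch_target_address", ""))
address_to_check: List[bytes32] = [a for a in (farmer_decoded, pool_decoded) if a is not None]
```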
Zlatko can u take a look at this if u have some time | 2024-02-08T23:06:07Z | [] | [] |
Chia-Network/chia-blockchain | 17,538 | Chia-Network__chia-blockchain-17538 | [
"10920"
] | 11b41944cf99be7ee3571eac61b8b2e4a5e8a00e | diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py
--- a/chia/cmds/wallet.py
+++ b/chia/cmds/wallet.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import asyncio
+import pathlib
from decimal import Decimal
from typing import List, Optional, Sequence
@@ -443,7 +444,13 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: str, token_name: str
help="A wallet id of an asset to receive and the amount you wish to receive (formatted like wallet_id:amount)",
multiple=True,
)
-@click.option("-p", "--filepath", help="The path to write the generated offer file to", required=True)
+@click.option(
+ "-p",
+ "--filepath",
+ help="The path to write the generated offer file to",
+ required=True,
+ type=click.Path(dir_okay=False, writable=True, path_type=pathlib.Path),
+)
@click.option(
"-m", "--fee", help="A fee to add to the offer when it gets taken, in XCH", default="0", show_default=True
)
@@ -459,7 +466,7 @@ def make_offer_cmd(
fingerprint: int,
offer: Sequence[str],
request: Sequence[str],
- filepath: str,
+ filepath: pathlib.Path,
fee: str,
reuse: bool,
override: bool,
diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py
--- a/chia/cmds/wallet_funcs.py
+++ b/chia/cmds/wallet_funcs.py
@@ -413,7 +413,7 @@ async def make_offer(
d_fee: Decimal,
offers: Sequence[str],
requests: Sequence[str],
- filepath: str,
+ filepath: pathlib.Path,
reuse_puzhash: Optional[bool],
) -> None:
async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config):
@@ -550,23 +550,24 @@ async def make_offer(
cli_confirm("Confirm (y/n): ", "Not creating offer...")
- offer, trade_record = await wallet_client.create_offer_for_ids(
- offer_dict,
- driver_dict=driver_dict,
- fee=fee,
- tx_config=CMDTXConfigLoader(
- reuse_puzhash=reuse_puzhash,
- ).to_tx_config(units["chia"], config, fingerprint),
- )
- if offer is not None:
- with open(pathlib.Path(filepath), "w") as file:
- file.write(offer.to_bech32())
- print(f"Created offer with ID {trade_record.trade_id}")
- print(
- f"Use chia wallet get_offers --id " f"{trade_record.trade_id} -f {fingerprint} to view status"
+ with filepath.open(mode="w") as file:
+ offer, trade_record = await wallet_client.create_offer_for_ids(
+ offer_dict,
+ driver_dict=driver_dict,
+ fee=fee,
+ tx_config=CMDTXConfigLoader(
+ reuse_puzhash=reuse_puzhash,
+ ).to_tx_config(units["chia"], config, fingerprint),
)
- else:
- print("Error creating offer")
+ if offer is not None:
+ file.write(offer.to_bech32())
+ print(f"Created offer with ID {trade_record.trade_id}")
+ print(
+ f"Use chia wallet get_offers --id "
+ f"{trade_record.trade_id} -f {fingerprint} to view status"
+ )
+ else:
+ print("Error creating offer")
def timestamp_to_time(timestamp: int) -> str:
| diff --git a/tests/cmds/wallet/test_wallet.py b/tests/cmds/wallet/test_wallet.py
--- a/tests/cmds/wallet/test_wallet.py
+++ b/tests/cmds/wallet/test_wallet.py
@@ -4,6 +4,7 @@
from typing import Any, Dict, List, Optional, Tuple, Union, cast
import pkg_resources
+import pytest
from chia_rs import Coin, G2Element
from chia.server.outbound_message import NodeType
@@ -627,6 +628,59 @@ async def set_cat_name(self, wallet_id: int, name: str) -> None:
test_rpc_clients.wallet_rpc_client.check_log(expected_calls)
+def test_make_offer_bad_filename(
+ capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path], tmp_path: Path
+) -> None:
+ _, root_dir = get_test_cli_clients
+
+ request_cat_id = get_bytes32(2)
+ request_nft_id = get_bytes32(2)
+ request_nft_addr = encode_puzzle_hash(request_nft_id, "nft")
+ # we offer xch and a random cat via wallet id and request a random cat, nft via coin and tail
+ command_args_dir = [
+ "wallet",
+ "make_offer",
+ FINGERPRINT_ARG,
+ f"-p{str(tmp_path)}",
+ "--reuse",
+ "-m1",
+ "--offer",
+ "1:10",
+ "--offer",
+ "3:100",
+ "--request",
+ f"{request_cat_id.hex()}:10",
+ "--request",
+ f"{request_nft_addr}:1",
+ ]
+
+ test_file: Path = tmp_path / "test.offer"
+ test_file.touch(mode=0o400)
+
+ command_args_unwritable = [
+ "wallet",
+ "make_offer",
+ FINGERPRINT_ARG,
+ f"-p{str(test_file)}",
+ "--reuse",
+ "-m1",
+ "--offer",
+ "1:10",
+ "--offer",
+ "3:100",
+ "--request",
+ f"{request_cat_id.hex()}:10",
+ "--request",
+ f"{request_nft_addr}:1",
+ ]
+
+ with pytest.raises(AssertionError, match=r".*Invalid value for '-p' / '--filepath.*is a directory.*"):
+ run_cli_command_and_assert(capsys, root_dir, command_args_dir, [""])
+
+ with pytest.raises(AssertionError, match=r".*Invalid value for '-p' / '--filepath.*is not writable.*"):
+ run_cli_command_and_assert(capsys, root_dir, command_args_unwritable, [""])
+
+
def test_make_offer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path], tmp_path: Path) -> None:
test_rpc_clients, root_dir = get_test_cli_clients
| [Bug] missing output filename causes exception, but still creates offer and does not show to user
### What happened?
```
~/chia-blockchain$ chia wallet make_offer -f 1849xxxx -o 2:2 -r 1:0.0000001 -p /home/jm -m 0.000000000005
Creating Offer
--------------
OFFERING:
- 2 Chia Holiday 2021 Token (2000 mojos)
REQUESTING:
- 0.0000001 XCH (100000 mojos)
Confirm (y/n): y
Exception from 'wallet' [Errno 21] Is a directory: '/home/jm'
```
Repeating the command produces this error:
```
Exception from 'wallet' {'error': "Error creating offer: Can't make this transaction at the moment. Waiting for the change from the previous transaction.", 'success': False}
```
I was able to recover the offer with `chia wallet get_offers -id <id> -p foo.offer`.
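For reference, a minimal sketch of the kind of guard the patch above introduces: validate the output path at argument-parsing time and open the file before the offer is built, so nothing is created or spent when the path is a directory or unwritable. The names below are illustrative, not the full `make_offer` command.
```python
import pathlib

import click


@click.command()
@click.option(
    "-p",
    "--filepath",
    required=True,
    type=click.Path(dir_okay=False, writable=True, path_type=pathlib.Path),
)
def make_offer(filepath: pathlib.Path) -> None:
    # Opening the output file first means "[Errno 21] Is a directory" style
    # failures happen before any offer or transaction is generated.
    with filepath.open(mode="w") as file:
        file.write("offer1...")  # placeholder for the generated offer text
```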
### Version
1.3.1
### What platform are you using?
Linux
### What ui mode are you using?
CLI
### Relevant log output
_No response_
| Repro'd against 1.6.0
This issue has not been updated in 14 days and is now flagged as stale. If this issue is still affecting you and in need of further review, please comment on it with an update to keep it from auto closing in 7 days.
This issue was automatically closed because it has been flagged as stale, and subsequently passed 7 days with no further activity from the submitter or watchers. | 2024-02-09T21:29:39Z | [] | [] |
pypi/warehouse | 82 | pypi__warehouse-82 | [
"64"
] | 925d8b7958d756eabd0f70b99ecf82416dce442a | diff --git a/warehouse/__init__.py b/warehouse/__init__.py
--- a/warehouse/__init__.py
+++ b/warehouse/__init__.py
@@ -16,10 +16,10 @@
from warehouse.__about__ import (
__title__, __summary__, __uri__, __version__, __author__, __email__,
- __license__, __copyright__,
+ __license__, __copyright__, __build__,
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__email__", "__license__", "__copyright__", "__build__",
]
diff --git a/warehouse/application.py b/warehouse/application.py
--- a/warehouse/application.py
+++ b/warehouse/application.py
@@ -32,6 +32,7 @@
import warehouse.cli
from warehouse.http import Request
+from warehouse.middleware import PoweredBy
from warehouse.utils import AttributeDict, merge_dict, convert_to_attr_dict
@@ -78,6 +79,12 @@ def __init__(self, config, engine=None):
}),
)
+ # Add our Powered By Middleware
+ self.wsgi_app = PoweredBy(self.wsgi_app, "Warehouse {} ({})".format(
+ warehouse.__version__,
+ warehouse.__build__,
+ ))
+
def __call__(self, environ, start_response):
"""
Shortcut for :attr:`wsgi_app`.
diff --git a/warehouse/middleware.py b/warehouse/middleware.py
new file mode 100644
--- /dev/null
+++ b/warehouse/middleware.py
@@ -0,0 +1,28 @@
+# Copyright 2013 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+
+class PoweredBy(object):
+
+ def __init__(self, app, powered_by):
+ self.app = app
+ self.powered_by = powered_by
+
+ def __call__(self, environ, start_response):
+ def _start_response(status, headers, exc_info=None):
+ headers.append(("X-Powered-By", self.powered_by))
+ return start_response(status, headers, exc_info)
+
+ return self.app(environ, _start_response)
| diff --git a/tests/test_application.py b/tests/test_application.py
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -101,7 +101,7 @@ def test_wsgi_app(app, monkeypatch):
assert urls.bind_to_environ.calls == [pretend.call(environ)]
assert import_module.calls == [pretend.call("warehouse.fake")]
assert fake_view.calls == [pretend.call(app, mock.ANY)]
- assert response.calls == [pretend.call(environ, start_response)]
+ assert response.calls == [pretend.call(environ, mock.ANY)]
def test_wsgi_app_exception(app, monkeypatch):
diff --git a/tests/test_middleware.py b/tests/test_middleware.py
new file mode 100644
--- /dev/null
+++ b/tests/test_middleware.py
@@ -0,0 +1,35 @@
+# Copyright 2013 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+from __future__ import unicode_literals
+
+import mock
+import pretend
+
+from warehouse.middleware import PoweredBy
+
+
+def test_powered_by():
+ app = pretend.call_recorder(lambda environ, start_response: start_response)
+ powered_by = PoweredBy(app, "Test Powered By")
+
+ environ = pretend.stub()
+ start_response = pretend.call_recorder(lambda *a: None)
+
+ powered_by(environ, start_response)(200, [])
+
+ assert app.calls == [pretend.call(environ, mock.ANY)]
+ assert start_response.calls == [
+ pretend.call(200, [("X-Powered-By", "Test Powered By")], None),
+ ]
| X-Powered-By Headers
It'd be useful to add an X-Powered-By header to make it easy to check which version of Warehouse is running.
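A minimal usage sketch of the middleware this patch adds (the version string below is just an example):
```python
from warehouse.middleware import PoweredBy


def application(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]


# Every response now carries an "X-Powered-By: Warehouse <version> (<build>)" header.
application = PoweredBy(application, "Warehouse 0.1.dev1 (unknown)")
```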
| 2013-10-20T23:49:44Z | [] | [] |
|
pypi/warehouse | 90 | pypi__warehouse-90 | [
"75"
] | a1533e6f8711fb4bdfa1bfeba54775f1db49aa72 | diff --git a/warehouse/legacy/simple.py b/warehouse/legacy/simple.py
--- a/warehouse/legacy/simple.py
+++ b/warehouse/legacy/simple.py
@@ -15,7 +15,6 @@
from __future__ import unicode_literals
import os.path
-import re
from werkzeug.exceptions import NotFound
from werkzeug.security import safe_join
@@ -23,10 +22,13 @@
from warehouse.helpers import url_for
from warehouse.http import Response
-from warehouse.utils import cache, get_mimetype, render_response
+from warehouse.utils import (
+ cache, fastly, get_mimetype, normalize, render_response,
+)
@cache("simple")
+@fastly("simple-index")
def index(app, request):
projects = app.models.packaging.all_projects()
resp = render_response(
@@ -34,10 +36,6 @@ def index(app, request):
projects=projects,
)
- # Add our surrogate key headers for Fastly
- if app.config.fastly:
- resp.headers.add("Surrogate-Key", "simple-index")
-
# Add a header that points to the last serial
serial = app.models.packaging.get_last_serial()
resp.headers.add("X-PyPI-Last-Serial", serial)
@@ -46,6 +44,7 @@ def index(app, request):
@cache("simple")
+@fastly("simple", "simple~{project_name!n}")
def project(app, request, project_name):
# Get the real project name for this project
project = app.models.packaging.get_project(project_name)
@@ -53,9 +52,6 @@ def project(app, request, project_name):
if project is None:
raise NotFound("{} does not exist".format(project_name))
- # Normalize the project name
- normalized = re.sub("_", "-", project.name, re.I).lower()
-
# Generate the Package URLs for the packages we've hosted
file_urls = app.models.packaging.get_file_urls(project.name)
@@ -97,13 +93,6 @@ def project(app, request, project_name):
external_urls=external_urls,
)
- # Add our surrogate key headers for Fastly
- if app.config.fastly:
- resp.headers.add(
- "Surrogate-Key",
- " ".join(["simple", "simple~{}".format(normalized)]),
- )
-
# Add a header that points to the last serial
serial = app.models.packaging.get_last_serial(project.name)
resp.headers.add("X-PyPI-Last-Serial", serial)
@@ -144,7 +133,7 @@ def package(app, request, path):
# Get the project name and normalize it
lookup_filename = filename[:-4] if filename.endswith(".asc") else filename
project = app.models.packaging.get_project_for_filename(lookup_filename)
- normalized = re.sub("_", "-", project.name, re.I).lower()
+ normalized = normalize(project.name)
# Get the MD5 hash of the file
content_md5 = app.models.packaging.get_filename_md5(filename)
@@ -152,13 +141,12 @@ def package(app, request, path):
headers = {}
# Add in additional headers if we're using Fastly
- if app.config.fastly:
- headers.update({
- "Surrogate-Key": " ".join([
- "package",
- "package~{}".format(normalized),
- ]),
- })
+ headers.update({
+ "Surrogate-Key": " ".join([
+ "package",
+ "package~{}".format(normalized),
+ ]),
+ })
# Look up the last serial for this file
serial = app.models.packaging.get_last_serial(project.name)
diff --git a/warehouse/ui/views.py b/warehouse/ui/views.py
--- a/warehouse/ui/views.py
+++ b/warehouse/ui/views.py
@@ -14,18 +14,17 @@
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
-import re
-
import jinja2
from recliner import htmlize
from werkzeug.exceptions import NotFound
from warehouse.helpers import url_for
-from warehouse.utils import cache, redirect, render_response
+from warehouse.utils import cache, fastly, redirect, render_response
@cache("project_detail")
+@fastly("project-detail", "project-detail~{project_name!n}")
def project_detail(app, request, project_name, version=None):
# Get the real project name for this project
project = app.models.packaging.get_project(project_name)
@@ -33,9 +32,6 @@ def project_detail(app, request, project_name, version=None):
if project is None:
raise NotFound("Cannot find a project named {}".format(project_name))
- # Normalize the project name
- normalized = re.sub("_", "-", project.name, re.I).lower()
-
# Look up the version of the given project
versions = app.models.packaging.get_project_versions(project.name)
@@ -52,7 +48,7 @@ def project_detail(app, request, project_name, version=None):
if project.name != project_name:
# We've found the project, and the version exists, but the project name
# isn't quite right so we'll redirect them to the correct one.
- resp = redirect(
+ return redirect(
url_for(
request,
"warehouse.ui.views.project_detail",
@@ -62,18 +58,6 @@ def project_detail(app, request, project_name, version=None):
code=301,
)
- # Add our surrogate key headers for Fastly
- if app.config.fastly:
- resp.headers.add(
- "Surrogate-Key",
- " ".join([
- "project-detail",
- "project-detail~{}".format(normalized),
- ]),
- )
-
- return resp
-
if version is None:
# If there's no version specified, then we use the latest version
version = versions[0]
@@ -100,7 +84,7 @@ def project_detail(app, request, project_name, version=None):
# Mark our description_html as safe as it's already been cleaned by bleach
description_html = jinja2.Markup(description_html)
- resp = render_response(
+ return render_response(
app, request, "projects/detail.html",
project=project,
release=release,
@@ -116,15 +100,3 @@ def project_detail(app, request, project_name, version=None):
documentation=app.models.packaging.get_documentation_url(project.name),
bugtracker=app.models.packaging.get_bugtrack_url(project.name),
)
-
- # Add our surrogate key headers for Fastly
- if app.config.fastly:
- resp.headers.add(
- "Surrogate-Key",
- " ".join([
- "project-detail",
- "project-detail~{}".format(normalized),
- ]),
- )
-
- return resp
diff --git a/warehouse/utils.py b/warehouse/utils.py
--- a/warehouse/utils.py
+++ b/warehouse/utils.py
@@ -17,6 +17,8 @@
import collections
import functools
import mimetypes
+import re
+import string
from werkzeug.urls import iri_to_uri
from werkzeug.utils import escape
@@ -148,3 +150,39 @@ def redirect(location, code=302):
(escape(location), display_location), code, mimetype="text/html")
response.headers["Location"] = location
return response
+
+
+def normalize(value):
+ return re.sub("_", "-", value, re.I).lower()
+
+
+class FastlyFormatter(string.Formatter):
+
+ def convert_field(self, value, conversion):
+ if conversion == "n":
+ return normalize(value)
+ return super(FastlyFormatter, self).convert_field(value, conversion)
+
+
+def fastly(*keys):
+ def decorator(fn):
+ @functools.wraps(fn)
+ def wrapper(app, request, *args, **kwargs):
+ # Get the response from the view
+ resp = fn(app, request, *args, **kwargs)
+
+ # Resolve our surrogate keys
+ ctx = {"app": app, "request": request}
+ ctx.update(kwargs)
+ surrogate_keys = [
+ FastlyFormatter().format(key, **ctx)
+ for key in keys
+ ]
+
+ # Set our Fastly Surrogate-Key header
+ resp.headers["Surrogate-Key"] = " ".join(surrogate_keys)
+
+ # Return the modified response
+ return resp
+ return wrapper
+ return decorator
| diff --git a/tests/legacy/test_simple.py b/tests/legacy/test_simple.py
--- a/tests/legacy/test_simple.py
+++ b/tests/legacy/test_simple.py
@@ -27,8 +27,7 @@
from warehouse.legacy import simple
-@pytest.mark.parametrize("fastly", [True, False])
-def test_index(fastly, monkeypatch):
+def test_index(monkeypatch):
response = pretend.stub(status_code=200, headers=Headers())
render = pretend.call_recorder(lambda *a, **k: response)
monkeypatch.setattr(simple, "render_response", render)
@@ -37,7 +36,6 @@ def test_index(fastly, monkeypatch):
app = pretend.stub(
config=pretend.stub(
- fastly=fastly,
cache=pretend.stub(browser=False, varnish=False),
),
models=pretend.stub(
@@ -53,11 +51,7 @@ def test_index(fastly, monkeypatch):
assert resp is response
assert resp.headers["X-PyPI-Last-Serial"] == "9999"
-
- if fastly:
- assert resp.headers["Surrogate-Key"] == "simple-index"
- else:
- assert "Surrogate-Key" not in resp.headers
+ assert resp.headers["Surrogate-Key"] == "simple-index"
assert render.calls == [
pretend.call(
@@ -69,15 +63,12 @@ def test_index(fastly, monkeypatch):
@pytest.mark.parametrize(
- (
- "fastly", "project_name", "hosting_mode", "release_urls",
- "e_project_urls",
- ),
+ ("project_name", "hosting_mode", "release_urls", "e_project_urls"),
[
- (True, "foo", "pypi-explicit", {}, []),
- (False, "foo", "pypi-explicit", {}, []),
+ ("foo", "pypi-explicit", {}, []),
+ ("foo", "pypi-explicit", {}, []),
(
- True, "foo", "pypi-scrape",
+ "foo", "pypi-scrape",
{
"1.0": (
"http://example.com/home/",
@@ -97,10 +88,10 @@ def test_index(fastly, monkeypatch):
},
],
),
- (True, "foo", "pypi-scrape", {"1.0": ("UNKNOWN", "UNKNOWN")}, []),
+ ("foo", "pypi-scrape", {"1.0": ("UNKNOWN", "UNKNOWN")}, []),
],
)
-def test_project(fastly, project_name, hosting_mode, release_urls,
+def test_project(project_name, hosting_mode, release_urls,
e_project_urls, monkeypatch):
response = pretend.stub(status_code=200, headers=Headers())
render = pretend.call_recorder(lambda *a, **k: response)
@@ -113,7 +104,6 @@ def test_project(fastly, project_name, hosting_mode, release_urls,
app = pretend.stub(
config=pretend.stub(
- fastly=fastly,
cache=pretend.stub(browser=False, varnish=False),
),
models=pretend.stub(
@@ -135,12 +125,8 @@ def test_project(fastly, project_name, hosting_mode, release_urls,
assert resp is response
assert resp.headers["Link"] == "</foo/>; rel=canonical"
-
- if fastly:
- surrogate = "simple simple~{}".format(project_name)
- assert resp.headers["Surrogate-Key"] == surrogate
- else:
- assert "Surrogate-Key" not in resp.headers
+ assert (resp.headers["Surrogate-Key"] ==
+ "simple simple~{}".format(project_name))
assert render.calls == [
pretend.call(
@@ -191,17 +177,13 @@ def test_project_not_found():
assert app.models.packaging.get_project.calls == [pretend.call("foo")]
-@pytest.mark.parametrize(("fastly", "serial", "md5_hash"), [
- (True, 999, "d41d8cd98f00b204e9800998ecf8427f"),
- (False, 999, "d41d8cd98f00b204e9800998ecf8427f"),
- (True, None, "d41d8cd98f00b204e9800998ecf8427f"),
- (False, None, "d41d8cd98f00b204e9800998ecf8427f"),
- (True, 999, None),
- (False, 999, None),
- (True, None, None),
- (False, None, None),
+@pytest.mark.parametrize(("serial", "md5_hash"), [
+ (999, "d41d8cd98f00b204e9800998ecf8427f"),
+ (None, "d41d8cd98f00b204e9800998ecf8427f"),
+ (999, None),
+ (None, None),
])
-def test_package(fastly, serial, md5_hash, monkeypatch):
+def test_package(serial, md5_hash, monkeypatch):
safe_join = pretend.call_recorder(
lambda *a, **k: "/tmp/packages/any/t/test-1.0.tar.gz"
)
@@ -225,7 +207,6 @@ def test_package(fastly, serial, md5_hash, monkeypatch):
app = pretend.stub(
config=pretend.stub(
- fastly=fastly,
cache=pretend.stub(browser=False, varnish=False),
paths=pretend.stub(packages="/tmp"),
),
@@ -246,11 +227,7 @@ def test_package(fastly, serial, md5_hash, monkeypatch):
else:
assert "X-PyPI-Last-Serial" not in resp.headers
- if fastly:
- assert resp.headers["Surrogate-Key"] == "package package~test"
- else:
- assert "Surrogate-Key" not in resp.headers
-
+ assert resp.headers["Surrogate-Key"] == "package package~test"
assert resp.headers["Content-Length"] == "54321"
assert safe_join.calls == [
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -20,8 +20,8 @@
import six
from warehouse.utils import (
- AttributeDict, convert_to_attr_dict, merge_dict, render_response, cache,
- get_wsgi_application, get_mimetype, redirect
+ AttributeDict, FastlyFormatter, convert_to_attr_dict, merge_dict,
+ render_response, cache, get_wsgi_application, get_mimetype, redirect
)
@@ -157,3 +157,8 @@ def test_redirect_unicode():
resp = redirect(six.text_type("/foo/"))
assert resp.status_code == 302
assert resp.headers["Location"] == "/foo/"
+
+
+def test_fastly_formatter():
+ assert FastlyFormatter().format("{0}", "Foo") == "Foo"
+ assert FastlyFormatter().format("{0!n}", "Foo") == "foo"
diff --git a/tests/ui/test_views.py b/tests/ui/test_views.py
--- a/tests/ui/test_views.py
+++ b/tests/ui/test_views.py
@@ -71,15 +71,13 @@ def test_project_detail_no_versions():
]
-@pytest.mark.parametrize("fastly", [True, False])
-def test_project_detail_redirects(fastly):
+def test_project_detail_redirects():
app = pretend.stub(
config=pretend.stub(
cache=pretend.stub(
browser=False,
varnish=False,
),
- fastly=fastly,
),
models=pretend.stub(
packaging=pretend.stub(
@@ -103,16 +101,13 @@ def test_project_detail_redirects(fastly):
project_name = "test-Project"
normalized = "test-project"
- resp = project_detail(app, request, project_name)
+ resp = project_detail(app, request, project_name=project_name)
assert resp.status_code == 301
assert resp.headers["Location"] == "/projects/test-project/"
- if fastly:
- assert resp.headers["Surrogate-Key"] == \
- "project-detail project-detail~{}".format(normalized)
- else:
- assert "Surrogate-Key" not in resp.headers
+ assert resp.headers["Surrogate-Key"] == \
+ "project-detail project-detail~{}".format(normalized)
assert app.models.packaging.get_project.calls == [
pretend.call("test-Project"),
@@ -163,7 +158,7 @@ def test_project_detail_invalid_version():
]
-@pytest.mark.parametrize(("version", "description", "fastly"), [
+@pytest.mark.parametrize(("version", "description"), [
(
None,
textwrap.dedent("""
@@ -172,17 +167,6 @@ def test_project_detail_invalid_version():
This is a test project
"""),
- True,
- ),
- (
- None,
- textwrap.dedent("""
- Test Project
- ============
-
- This is a test project
- """),
- False,
),
(
"1.0",
@@ -192,24 +176,11 @@ def test_project_detail_invalid_version():
This is a test project
"""),
- True,
),
- (
- "1.0",
- textwrap.dedent("""
- Test Project
- ============
-
- This is a test project
- """),
- False,
- ),
- (None, ".. code-fail::\n wat", True),
- (None, ".. code-fail::\n wat", False),
- ("1.0", ".. code-fail::\n wat", True),
- ("1.0", ".. code-fail::\n wat", False),
+ (None, ".. code-fail::\n wat"),
+ ("1.0", ".. code-fail::\n wat"),
])
-def test_project_detail_valid(version, description, fastly):
+def test_project_detail_valid(version, description):
release = {
"description": description,
}
@@ -224,7 +195,6 @@ def test_project_detail_valid(version, description, fastly):
browser=False,
varnish=False,
),
- fastly=fastly,
),
models=pretend.stub(
packaging=pretend.stub(
@@ -262,15 +232,17 @@ def test_project_detail_valid(version, description, fastly):
project_name = "test-project"
normalized = "test-project"
- resp = project_detail(app, request, project_name, version)
+ resp = project_detail(
+ app,
+ request,
+ project_name=project_name,
+ version=version,
+ )
assert resp.status_code == 200
- if fastly:
- assert resp.headers["Surrogate-Key"] == \
- "project-detail project-detail~{}".format(normalized)
- else:
- assert "Surrogate-Key" not in resp.headers
+ assert resp.headers["Surrogate-Key"] == \
+ "project-detail project-detail~{}".format(normalized)
assert app.models.packaging.get_project.calls == [
pretend.call("test-project"),
| Implement a Fastly Utility Decorator
Copying and pasting the Surrogate-Key handling code all over the place is far from optimal, so we should figure out how to turn it into a decorator (ideally) or at the very least a utility function that can be called.
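For reference, a usage sketch of the decorator this patch introduces, taken from the view changes above; the `!n` conversion normalizes the project name before it is emitted in the Surrogate-Key header:
```python
from warehouse.utils import fastly


@fastly("project-detail", "project-detail~{project_name!n}")
def project_detail(app, request, project_name, version=None):
    ...  # response gets "Surrogate-Key: project-detail project-detail~<normalized name>"
```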
| 2013-10-21T12:50:55Z | [] | [] |
|
pypi/warehouse | 93 | pypi__warehouse-93 | [
"5"
] | c7cc3aab6fff825b7f56e34102e1b9e86a822541 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -68,6 +68,7 @@ def recursive_glob(path, pattern, cutdirs=0):
"alembic",
"babel",
"enum34",
+ "guard",
"Jinja2",
"psycopg2",
"PyYAML",
diff --git a/warehouse/application.py b/warehouse/application.py
--- a/warehouse/application.py
+++ b/warehouse/application.py
@@ -21,6 +21,7 @@
import babel.dates
import babel.support
+import guard
import jinja2
import redis as redispy
@@ -117,7 +118,6 @@ def __init__(self, config, engine=None, redis=None):
asset_config = self.config.assets
asset_config.setdefault("debug", self.config.debug)
asset_config.setdefault("auto_build", self.config.debug)
- asset_config.setdefault("less_run_in_debug", False)
self.templates.assets_environment = AssetsEnvironment(**asset_config)
@@ -136,12 +136,22 @@ def __init__(self, config, engine=None, redis=None):
warehouse.__build__,
))
+ # Add our Content Security Policy Middleware
+ self.wsgi_app = guard.ContentSecurityPolicy(
+ self.wsgi_app,
+ self.config.security.csp,
+ )
+
# Serve the static files if we're in debug
if self.config.debug:
self.wsgi_app = SharedDataMiddleware(
self.wsgi_app,
{"/static/": static_path},
)
+ self.wsgi_app = SharedDataMiddleware(
+ self.wsgi_app,
+ {"/static/": self.config.assets.directory},
+ )
def __call__(self, environ, start_response):
"""
| diff --git a/tests/test_application.py b/tests/test_application.py
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -42,6 +42,9 @@ def test_basic_instantiation():
"redis": {
"url": "redis://localhost:6379/0"
},
+ "security": {
+ "csp": {},
+ },
})
| Implement CSP
Ideally the entire site will be protected by a CSP policy. However, because of the admin, we might need to exclude /admin from that, at least until the Django admin no longer uses inline JavaScript or CSS.
| We no longer use Django so concerns about the Django admin are no longer a factor.
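For illustration, a CSP policy is essentially a mapping of directives to allowed sources. The dict below is a hypothetical example only; the exact structure the `guard` middleware expects for `config.security.csp` is an assumption here and should be checked against guard's documentation.
```python
# Hypothetical policy shape: the directive names and "'self'" sources are standard
# CSP, but whether guard.ContentSecurityPolicy accepts exactly this dict is assumed,
# not confirmed by this patch.
csp_policy = {
    "default-src": ["'self'"],
    "script-src": ["'self'"],
    "style-src": ["'self'"],
}
```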
| 2013-10-21T15:23:48Z | [] | [] |
pypi/warehouse | 110 | pypi__warehouse-110 | [
"107"
] | 1c88e1932603c4e3d5fef05916746d10d7be1b11 | diff --git a/warehouse/application.py b/warehouse/application.py
--- a/warehouse/application.py
+++ b/warehouse/application.py
@@ -18,6 +18,7 @@
import collections
import importlib
import os.path
+import logging.config
import babel.dates
import babel.support
@@ -153,6 +154,9 @@ def __init__(self, config, engine=None, redis=None):
{"/static/": self.config.assets.directory},
)
+ # configure logging
+ logging.config.dictConfig(self.config.logging)
+
def __call__(self, environ, start_response):
"""
Shortcut for :attr:`wsgi_app`.
| diff --git a/tests/test_application.py b/tests/test_application.py
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -45,6 +45,9 @@ def test_basic_instantiation():
"security": {
"csp": {},
},
+ "logging": {
+ "version": 1,
+ },
})
| Logging configuration
There's already some logging being done by the app - the web requests. There will be more logging done explicitly by code as users do things (dumb things _or_ good things).
I'm going to add a new section to the configuration file called logging which will have the following structure (closely mirroring the standard logging configuration file structure, but not requiring us to actually have a separate INI syntax file).
``` yaml
logging:
formatters:
simpleFormater:
format: '%(asctime)s - %(levelname)s: %(message)s'
datefmt: '%Y/%m/%d %H:%M:%S'
handlers:
console:
class: logging.StreamHandler
formatter: simpleFormater
level: DEBUG
stream: ext://sys.stdout
file:
class : logging.FileHandler
formatter: simpleFormater
level: WARNING
filename: output.log
loggers:
clogger:
level: DEBUG
handlers: [console]
flogger:
level: WARNING
handlers: [file]
root:
level: DEBUG
handlers: [console, file]
```
This example is way more complex than I imagine any given configuration would actually be.
This configuration is then loaded with some code like:
``` python
import logging.config

# config = loaded yaml config
logging_conf = config['logging']
logging_conf.setdefault('version', 1)
logging.config.dictConfig(logging_conf)
```
For a more concrete dev configuration I anticipate something like:
``` yaml
logging:
handlers:
console:
class: logging.StreamHandler
level: DEBUG
stream: ext://sys.stdout
root:
level: DEBUG
handlers: [console]
```
(which will hopefully work; actual results may vary of course given it's the logging module we're talking about)
| 2013-10-29T02:52:05Z | [] | [] |
|
pypi/warehouse | 133 | pypi__warehouse-133 | [
"131"
] | f9de953b361ae478d06a37d63b1a4cc297dc5976 | diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -74,12 +74,15 @@ def project_detail(app, request, project_name, version=None):
# Get the release data for the version
release = app.models.packaging.get_release(project.name, version)
- # Render the project description
- description_html = htmlize(release["description"])
+ if release.get("description"):
+ # Render the project description
+ description_html = htmlize(release["description"])
- # If our description wasn't able to be rendered, wrap it in <pre></pre>
- if not description_html.rendered:
- description_html = "<pre>" + description_html + "</pre>"
+ # If our description wasn't able to be rendered, wrap it in <pre></pre>
+ if not description_html.rendered:
+ description_html = "<pre>" + description_html + "</pre>"
+ else:
+ description_html = ""
# Mark our description_html as safe as it's already been cleaned by bleach
description_html = jinja2.Markup(description_html)
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -179,6 +179,8 @@ def test_project_detail_invalid_version():
),
(None, ".. code-fail::\n wat"),
("1.0", ".. code-fail::\n wat"),
+ (None, None),
+ ("1.0", None),
])
def test_project_detail_valid(version, description):
release = {
| Handle the case where there is no long_description
Long description can be `None` instead of `""`. Right now this causes an exception.
```
Stacktrace (most recent call last):
File "raven/middleware.py", line 31, in __call__
iterable = self.application(environ, start_response)
File "site-packages/guard.py", line 62, in __call__
return self.application(environ, _start_response)
File "warehouse/middleware.py", line 28, in __call__
return self.app(environ, _start_response)
File "werkzeug/wsgi.py", line 40, in <lambda>
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
File "warehouse/application.py", line 260, in wsgi_app
return view(self, request, **kwargs)
File "warehouse/utils.py", line 90, in wrapper
resp = fn(app, request, *args, **kwargs)
File "warehouse/utils.py", line 173, in wrapper
resp = fn(app, request, *args, **kwargs)
File "warehouse/packaging/views.py", line 78, in project_detail
description_html = htmlize(release["description"])
File "recliner/renderer.py", line 152, in htmlize
html = render(text)
File "recliner/renderer.py", line 108, in render
settings_overrides=settings,
File "docutils/core.py", line 448, in publish_parts
enable_exit_status=enable_exit_status)
File "docutils/core.py", line 662, in publish_programmatically
output = pub.publish(enable_exit_status=enable_exit_status)
File "docutils/core.py", line 217, in publish
self.settings)
File "docutils/readers/__init__.py", line 71, in read
self.input = self.source.read()
File "docutils/io.py", line 426, in read
return self.decode(self.source)
File "docutils/io.py", line 99, in decode
data_encoding = self.determine_encoding_from_data(data)
File "docutils/io.py", line 142, in determine_encoding_from_data
if data.startswith(start_bytes):
```
Resolving this issue should resolve https://app.getsentry.com/pypi/warehouse/group/9475461/ as well.
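The guard applied in the patch above boils down to something like this sketch (`release` being the dict fetched for the requested version):
```python
from recliner import htmlize

description = release.get("description")
if description:
    description_html = htmlize(description)
else:
    description_html = ""  # None or empty description: skip rendering entirely
```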
| 2013-11-02T23:57:48Z | [] | [] |
|
pypi/warehouse | 137 | pypi__warehouse-137 | [
"134"
] | 585cb84faaf8d7968859d09aaa6eb6e64c15fcc8 | diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -133,6 +133,7 @@ def get_users_for_project(self, project):
query = (
select(
[users.c.username, emails.c.email],
+ distinct=users.c.username,
from_obj=users.outerjoin(
emails, emails.c.user_id == users.c.id,
),
@@ -141,10 +142,7 @@ def get_users_for_project(self, project):
users.c.username == roles.c.user_name,
roles.c.package_name == project,
))
- .order_by(
- roles.c.role_name.desc(),
- func.lower(roles.c.user_name),
- )
+ .order_by(users.c.username)
)
with self.engine.connect() as conn:
| diff --git a/tests/packaging/test_models.py b/tests/packaging/test_models.py
--- a/tests/packaging/test_models.py
+++ b/tests/packaging/test_models.py
@@ -306,6 +306,11 @@ def test_get_users_for_project(dbapp):
user_name="test-user2",
role_name="Maintainer",
))
+ dbapp.engine.execute(roles.insert().values(
+ package_name="test-project",
+ user_name="test-user",
+ role_name="Maintainer",
+ ))
dbapp.engine.execute(roles.insert().values(
package_name="test-project",
user_name="a-test-user",
@@ -313,8 +318,8 @@ def test_get_users_for_project(dbapp):
))
assert dbapp.models.packaging.get_users_for_project("test-project") == [
- {"username": "test-user", "email": None},
{"username": "a-test-user", "email": None},
+ {"username": "test-user", "email": None},
{"username": "test-user2", "email": "test@example.com"},
]
| Deduplicate Maintainers
If a user has both the maintainer and the owner role for a package, they will show up twice in the Maintainers list. The query should use DISTINCT so that they only show up once.
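A sketch of the de-duplicated query, mirroring the change in the patch above (which uses the SQLAlchemy 0.x-style `select()` warehouse used at the time). Note that PostgreSQL's `DISTINCT ON` requires the `ORDER BY` to lead with the same expression, hence ordering by `users.c.username`:
```python
# `users`, `emails` and `roles` are the existing warehouse table objects; passing a
# column to distinct= renders DISTINCT ON (users.username) on the PostgreSQL dialect.
from sqlalchemy import select

query = (
    select(
        [users.c.username, emails.c.email],
        distinct=users.c.username,
        from_obj=users.outerjoin(emails, emails.c.user_id == users.c.id),
    )
    .where(users.c.username == roles.c.user_name)
    .where(roles.c.package_name == "test-project")
    .order_by(users.c.username)
)
```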
| 2013-11-03T01:55:00Z | [] | [] |
|
pypi/warehouse | 161 | pypi__warehouse-161 | [
"160"
] | c56e630f0f5baf79369029e19de7cf299b5d238f | diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -17,12 +17,12 @@
from warehouse.helpers import url_for
from werkzeug.utils import redirect
-from . import xmlrpc
+from warehouse.legacy import xmlrpc
def pypi(app, request):
# if the MIME type of the request is XML then we go into XML-RPC mode
- if request.headers['Content-Type'] == 'text/xml':
+ if request.headers.get('Content-Type') == 'text/xml':
return xmlrpc.handle_request(app, request)
# no XML-RPC and no :action means we render the index, or at least we
| diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -15,14 +15,21 @@
from __future__ import unicode_literals
import pretend
+import pytest
from warehouse.legacy import pypi
-def test_pypi_index():
+@pytest.mark.parametrize("content_type", [None, "text/html", "__empty__"])
+def test_pypi_index(content_type):
+ headers = {}
+
+ if content_type != "__empty__":
+ headers["Content-Type"] = content_type
+
app = pretend.stub()
request = pretend.stub(
- headers={'Content-Type': None}, # GET request has no content-type
+ headers=headers,
url_adapter=pretend.stub(
build=pretend.call_recorder(
lambda *a, **kw: "/",
| Accessing /pypi without setting CONTENT_TYPE results in a 500
https://app.getsentry.com/pypi/warehouse/group/10206920/
``` pytb
Stacktrace (most recent call last):
File "raven/middleware.py", line 31, in __call__
iterable = self.application(environ, start_response)
File "site-packages/guard.py", line 62, in __call__
return self.application(environ, _start_response)
File "warehouse/middleware.py", line 28, in __call__
return self.app(environ, _start_response)
File "werkzeug/wsgi.py", line 40, in <lambda>
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
File "warehouse/application.py", line 251, in wsgi_app
return view(self, request, **kwargs)
File "warehouse/legacy/pypi.py", line 25, in pypi
if request.headers['Content-Type'] == 'text/xml':
File "werkzeug/datastructures.py", line 1268, in __getitem__
return _unicodify_header_value(self.environ[key])
```
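The crash boils down to indexed access on Werkzeug's header mapping raising `KeyError` when the header is absent (a plain GET usually carries no `Content-Type`), which is why the fix switches to `.get()`. A tiny illustration, assuming nothing beyond Werkzeug itself:

``` python
from werkzeug.datastructures import EnvironHeaders

environ = {"REQUEST_METHOD": "GET"}   # a plain GET: no CONTENT_TYPE key at all
headers = EnvironHeaders(environ)

print(headers.get("Content-Type"))    # None, so the comparison is simply False

try:
    headers["Content-Type"]           # indexed access is what blew up above
except KeyError:
    print("KeyError, as in the traceback")
```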
| 2013-11-17T18:32:33Z | [] | [] |
|
pypi/warehouse | 165 | pypi__warehouse-165 | [
"163"
] | 6ea343d0ba04bcb66a6b56732b0a6def04000b56 | diff --git a/warehouse/helpers.py b/warehouse/helpers.py
--- a/warehouse/helpers.py
+++ b/warehouse/helpers.py
@@ -32,6 +32,9 @@ def url_for(request, endpoint, **values):
def gravatar_url(email, size=80):
+ if email is None:
+ email = ""
+
email_hash = hashlib.md5(email.strip().lower()).hexdigest()
url = "https://secure.gravatar.com/avatar/{}".format(email_hash)
| diff --git a/tests/test_helpers.py b/tests/test_helpers.py
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -22,20 +22,28 @@
from warehouse.helpers import gravatar_url, url_for, static_url
-@pytest.mark.parametrize(("kwargs", "expected"), [
+@pytest.mark.parametrize(("email", "kwargs", "expected"), [
(
+ "test-user@example.com",
{},
("https://secure.gravatar.com/avatar/3664adb7d1eea0bd7d0b134577663889"
"?size=80"),
),
(
+ "test-user@example.com",
{"size": 1000},
("https://secure.gravatar.com/avatar/3664adb7d1eea0bd7d0b134577663889"
"?size=1000"),
),
+ (
+ None,
+ {},
+ ("https://secure.gravatar.com/avatar/d41d8cd98f00b204e9800998ecf8427e"
+ "?size=80"),
+ )
])
-def test_gravatar_url(kwargs, expected):
- assert gravatar_url("test-user@example.com", **kwargs) == expected
+def test_gravatar_url(email, kwargs, expected):
+ assert gravatar_url(email, **kwargs) == expected
@pytest.mark.parametrize(("external",), [(False,), (True,)])
| Error occurs rendering project page when a maintainer doesn't have an email
https://app.getsentry.com/pypi/warehouse/group/10270516/
``` pytb
Stacktrace (most recent call last):
File "raven/middleware.py", line 31, in __call__
iterable = self.application(environ, start_response)
File "site-packages/guard.py", line 62, in __call__
return self.application(environ, _start_response)
File "warehouse/middleware.py", line 28, in __call__
return self.app(environ, _start_response)
File "werkzeug/wsgi.py", line 40, in <lambda>
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
File "warehouse/application.py", line 251, in wsgi_app
return view(self, request, **kwargs)
File "warehouse/utils.py", line 91, in wrapper
resp = fn(app, request, *args, **kwargs)
File "warehouse/utils.py", line 174, in wrapper
resp = fn(app, request, *args, **kwargs)
File "warehouse/packaging/views.py", line 101, in project_detail
maintainers=app.models.packaging.get_users_for_project(project),
File "warehouse/utils.py", line 84, in render_response
return Response(template.render(**context), mimetype="text/html")
File "jinja2/environment.py", line 969, in render
return self.environment.handle_exception(exc_info, True)
File "jinja2/environment.py", line 742, in handle_exception
reraise(exc_type, exc_value, tb)
File "/opt/warehouse/site-packages/warehouse/templates/projects/detail.html", line 16, in top-level template code
{% extends "base.html" %}
File "/opt/warehouse/site-packages/warehouse/templates/base.html", line 59, in top-level template code
{% block content %}
File "/opt/warehouse/site-packages/warehouse/templates/projects/detail.html", line 251, in block "content"
<img src="{{ gravatar_url(maintainer.email, size=50) }}" alt="{{ maintainer.username}}" height="50" width="50" class="img-rounded" />
File "warehouse/helpers.py", line 35, in gravatar_url
email_hash = hashlib.md5(email.strip().lower()).hexdigest()
```
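The failure is simply `None.strip()`: `maintainer.email` can be NULL in the database. A standalone sketch of the guarded helper (with an explicit `.encode()` so it runs on its own; the patch itself just substitutes an empty string), with the expected hashes taken from the test changes:

``` python
import hashlib

def gravatar_hash(email):
    # Treat a missing e-mail as the empty string, mirroring the fix above.
    email = email or ""
    return hashlib.md5(email.strip().lower().encode("utf-8")).hexdigest()

print(gravatar_hash(None))                     # d41d8cd98f00b204e9800998ecf8427e
print(gravatar_hash("test-user@example.com"))  # 3664adb7d1eea0bd7d0b134577663889
```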
| 2013-11-18T03:09:16Z | [] | [] |
|
pypi/warehouse | 207 | pypi__warehouse-207 | [
"175"
] | 8e47fe7cd55a48152935d1ca085c81b67009e218 | diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -23,7 +23,9 @@
from warehouse.helpers import url_for
from warehouse.http import Response
from warehouse.legacy import xmlrpc
-from warehouse.utils import cache, fastly, is_valid_json_callback_name
+from warehouse.utils import (
+ cache, fastly, is_valid_json_callback_name, render_response,
+)
def pypi(app, request):
@@ -86,3 +88,49 @@ def project_json(app, request, project_name):
response = Response(data, mimetype="application/json")
response.headers['Content-Disposition'] = 'inline'
return response
+
+
+@cache("legacy_rss")
+@fastly("legacy_rss")
+def rss(app, request):
+ """Dump the last N days' updates as an RSS feed.
+ """
+ releases = app.db.packaging.get_recently_updated(num=40)
+ for release in releases:
+ values = dict(project_name=release['name'], version=release['version'])
+ url = app.urls.build('warehouse.packaging.views.project_detail',
+ values, force_external=True)
+ release.update(dict(url=url))
+
+ response = render_response(
+ app, request, "legacy/rss.xml",
+ description='package updates',
+ releases=releases,
+ site=app.config.site,
+ )
+ response.mimetype = 'text/xml; charset=utf-8'
+ # TODO: throw in a last-modified header too?
+ return response
+
+
+@cache("legacy_rss")
+@fastly("legacy_rss")
+def packages_rss(app, request):
+ """Dump the last N days' new projects as an RSS feed.
+ """
+ releases = app.db.packaging.get_recent_projects(num=40)
+ for release in releases:
+ values = dict(project_name=release['name'])
+ url = app.urls.build('warehouse.packaging.views.project_detail',
+ values, force_external=True)
+ release.update(dict(url=url))
+
+ response = render_response(
+ app, request, "legacy/rss.xml",
+ description='new projects',
+ releases=releases,
+ site=app.config.site,
+ )
+ response.mimetype = 'text/xml; charset=utf-8'
+ # TODO: throw in a last-modified header too?
+ return response
diff --git a/warehouse/packaging/db.py b/warehouse/packaging/db.py
--- a/warehouse/packaging/db.py
+++ b/warehouse/packaging/db.py
@@ -54,6 +54,29 @@ class Database(db.Database):
"""
)
+ get_recent_projects = db.rows(
+ # We only consider projects registered in the last 7 days (see
+ # get_recently_updated for reasoning)
+ """ SELECT
+ p.name, r.version, p.created, r.summary
+ FROM releases r, (
+ SELECT packages.name, max_order, packages.created
+ FROM packages
+ JOIN (
+ SELECT name, max(_pypi_ordering) AS max_order
+ FROM releases
+ WHERE created >= now() - interval '7 days'
+ GROUP BY name
+ ) mo ON packages.name = mo.name
+ ) p
+ WHERE p.name = r.name
+ AND p.max_order = r._pypi_ordering
+ AND p.created >= now() - interval '7 days'
+ ORDER BY p.created DESC
+ LIMIT %(num)s
+ """
+ )
+
get_releases_since = db.rows(
""" SELECT name, version, created, summary
FROM releases
| diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -19,9 +19,14 @@
import pretend
import pytest
+
+import jinja2
+
from werkzeug.exceptions import NotFound, BadRequest
+from werkzeug.routing import Map
from warehouse.legacy import pypi, xmlrpc
+from warehouse.packaging import urls
@pytest.mark.parametrize("content_type", [None, "text/html", "__empty__"])
@@ -151,3 +156,182 @@ def test_json_missing(monkeypatch, project):
with pytest.raises(NotFound):
pypi.project_json(app, request, project_name='spam')
+
+
+def test_rss(monkeypatch):
+ get_recently_updated = pretend.call_recorder(lambda num=10: [
+ dict(name='spam', version='1.0', summary='hai spam', created='now'),
+ dict(name='ham', version='2.0', summary='hai ham', created='now'),
+ dict(name='spam', version='2.0', summary='hai spam v2', created='now'),
+ ])
+ template = pretend.stub(
+ render=pretend.call_recorder(lambda **ctx: "<xml>dummy</xml>"),
+ )
+ app = pretend.stub(
+ db=pretend.stub(
+ packaging=pretend.stub(
+ get_recently_updated=get_recently_updated,
+ )
+ ),
+ config=pretend.stub(
+ cache=pretend.stub(browser=False, varnish=False),
+ site={"url": "http://test.server/", "name": "PyPI"},
+ ),
+ urls=Map(urls.urls).bind('test.server', '/'),
+ templates=pretend.stub(
+ get_template=pretend.call_recorder(lambda t: template),
+ ),
+ )
+ request = pretend.stub()
+
+ resp = pypi.rss(app, request)
+
+ assert get_recently_updated.calls == [pretend.call(num=40)]
+ assert len(template.render.calls) == 1
+ assert template.render.calls[0].kwargs['releases'] == [
+ {
+ 'url': 'http://test.server/project/spam/1.0/',
+ 'version': u'1.0',
+ 'name': u'spam',
+ 'summary': u'hai spam',
+ 'created': u'now',
+ }, {
+ 'url': 'http://test.server/project/ham/2.0/',
+ 'version': u'2.0',
+ 'name': u'ham',
+ 'summary': u'hai ham',
+ 'created': u'now',
+ }, {
+ 'url': 'http://test.server/project/spam/2.0/',
+ 'version': u'2.0',
+ 'name': u'spam',
+ 'summary': u'hai spam v2',
+ 'created': u'now',
+ }]
+ assert resp.data == "<xml>dummy</xml>"
+
+
+def test_packages_rss(monkeypatch):
+ get_recent_projects = pretend.call_recorder(lambda num=10: [
+ dict(name='spam', version='1.0', summary='hai spam', created='now'),
+ dict(name='ham', version='2.0', summary='hai ham', created='now'),
+ dict(name='eggs', version='21.0', summary='hai eggs!', created='now'),
+ ])
+ template = pretend.stub(
+ render=pretend.call_recorder(lambda **ctx: "<xml>dummy</xml>"),
+ )
+ app = pretend.stub(
+ db=pretend.stub(
+ packaging=pretend.stub(
+ get_recent_projects=get_recent_projects,
+ )
+ ),
+ config=pretend.stub(
+ cache=pretend.stub(browser=False, varnish=False),
+ site={"url": "http://test.server/", "name": "PyPI"},
+ ),
+ urls=Map(urls.urls).bind('test.server', '/'),
+ templates=pretend.stub(
+ get_template=pretend.call_recorder(lambda t: template),
+ ),
+ )
+ request = pretend.stub()
+
+ resp = pypi.packages_rss(app, request)
+
+ assert get_recent_projects.calls == [pretend.call(num=40)]
+ assert len(template.render.calls) == 1
+ assert template.render.calls[0].kwargs['releases'] == [
+ {
+ 'url': 'http://test.server/project/spam/',
+ 'version': u'1.0',
+ 'name': u'spam',
+ 'summary': u'hai spam',
+ 'created': u'now',
+ }, {
+ 'url': 'http://test.server/project/ham/',
+ 'version': u'2.0',
+ 'name': u'ham',
+ 'summary': u'hai ham',
+ 'created': u'now',
+ }, {
+ 'url': 'http://test.server/project/eggs/',
+ 'version': u'21.0',
+ 'name': u'eggs',
+ 'summary': u'hai eggs!',
+ 'created': u'now',
+ }]
+ assert resp.data == "<xml>dummy</xml>"
+
+
+def test_rss_xml_template(monkeypatch):
+ templates = jinja2.Environment(
+ autoescape=True,
+ auto_reload=False,
+ extensions=[
+ "jinja2.ext.i18n",
+ ],
+ loader=jinja2.PackageLoader("warehouse"),
+ )
+ template = templates.get_template('legacy/rss.xml')
+ content = template.render(
+ site=dict(url='http://test.server/', name="PyPI"),
+ description='package updates',
+ releases=[
+ {
+ 'url': 'http://test.server/project/spam/',
+ 'version': u'1.0',
+ 'name': u'spam',
+ 'summary': u'hai spam',
+ 'created': datetime.date(1970, 1, 1),
+ }, {
+ 'url': 'http://test.server/project/ham/',
+ 'version': u'2.0',
+ 'name': u'ham',
+ 'summary': u'hai ham',
+ 'created': datetime.date(1970, 1, 1),
+ }, {
+ 'url': 'http://test.server/project/eggs/',
+ 'version': u'21.0',
+ 'name': u'eggs',
+ 'summary': u'hai eggs!',
+ 'created': datetime.date(1970, 1, 1),
+ }
+ ],
+ )
+ assert content == '''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE rss PUBLIC "-//Netscape Communications//DTD RSS 0.91//EN" \
+"http://my.netscape.com/publish/formats/rss-0.91.dtd">
+<rss version="0.91">
+ <channel>
+ <title>PyPI Recent Package Updates</title>
+ <link>http://test.server/</link>
+ <description>Recent package updates at PyPI</description>
+ <language>en</language>
+ \n\
+ <item>
+ <title>spam 1.0</title>
+ <link>http://test.server/project/spam/</link>
+ <guid>http://test.server/project/spam/</guid>
+ <description>hai spam</description>
+ <pubDate>01 Jan 1970 00:00:00 GMT</pubDate>
+ </item>
+ \n\
+ <item>
+ <title>ham 2.0</title>
+ <link>http://test.server/project/ham/</link>
+ <guid>http://test.server/project/ham/</guid>
+ <description>hai ham</description>
+ <pubDate>01 Jan 1970 00:00:00 GMT</pubDate>
+ </item>
+ \n\
+ <item>
+ <title>eggs 21.0</title>
+ <link>http://test.server/project/eggs/</link>
+ <guid>http://test.server/project/eggs/</guid>
+ <description>hai eggs!</description>
+ <pubDate>01 Jan 1970 00:00:00 GMT</pubDate>
+ </item>
+ \n\
+ </channel>
+</rss>'''
diff --git a/tests/packaging/test_db.py b/tests/packaging/test_db.py
--- a/tests/packaging/test_db.py
+++ b/tests/packaging/test_db.py
@@ -415,6 +415,56 @@ def test_top_projects(num, result, dbapp):
assert top == result
+def test_get_recent_projects(dbapp):
+ def create_package(name, version, ordering, created):
+ dbapp.engine.execute(packages.insert().values(
+ name=name, created=created))
+ dbapp.engine.execute(releases.insert().values(
+ name=name, version=version, _pypi_ordering=ordering,
+ created=created))
+
+ now = datetime.datetime.utcnow()
+ create_package("foo1", "2.0", 2, now)
+ create_package("foo2", "1.0", 1, now - datetime.timedelta(seconds=45))
+ create_package("foo3", "1.0", 1, now - datetime.timedelta(seconds=15))
+ create_package("foo4", "1.0", 1, now - datetime.timedelta(seconds=40))
+ create_package("foo5", "1.0", 1, now - datetime.timedelta(seconds=25))
+ create_package("foo6", "1.0", 1, now - datetime.timedelta(seconds=30))
+ create_package("foo7", "1.0", 1, now - datetime.timedelta(seconds=35))
+
+ dbapp.engine.execute(releases.insert().values(
+ name="foo1", version="1.0", _pypi_ordering=1,
+ created=now - datetime.timedelta(seconds=5),
+ ))
+
+ assert dbapp.db.packaging.get_recent_projects(num=4) == [
+ {
+ "name": "foo1",
+ "version": "2.0",
+ "summary": None,
+ "created": now,
+ },
+ {
+ "name": "foo3",
+ "version": "1.0",
+ "summary": None,
+ "created": now - datetime.timedelta(seconds=15),
+ },
+ {
+ "name": "foo5",
+ "version": "1.0",
+ "summary": None,
+ "created": now - datetime.timedelta(seconds=25),
+ },
+ {
+ "name": "foo6",
+ "version": "1.0",
+ "summary": None,
+ "created": now - datetime.timedelta(seconds=30),
+ },
+ ]
+
+
@pytest.mark.parametrize(("name", "normalized"), [
("foo_bar", "foo-bar"),
("Bar", "bar"),
| implement legacy RSS feeds
This builds on top of the JSON changes, so that should be accepted first. I think I mis-gitted somewhere here :(
| 2014-02-28T15:05:35Z | [] | [] |
|
pypi/warehouse | 214 | pypi__warehouse-214 | [
"212"
] | ebc58ccb9531080502c26f576f48b6a89243dcf0 | diff --git a/warehouse/search/indexes.py b/warehouse/search/indexes.py
--- a/warehouse/search/indexes.py
+++ b/warehouse/search/indexes.py
@@ -25,8 +25,6 @@
class Index(object):
- _index = "warehouse"
-
def __init__(self, db, config):
self.db = db
self.config = config
@@ -37,14 +35,16 @@ def __init__(self, db, config):
self.types = AttributeDict()
+ self._index = config.index
+
def register(self, type_):
obj = type_(self)
self.types[obj._type] = obj
- def reindex(self, index=None, alias=True, keep_old=False):
+ def reindex(self, alias=True, keep_old=False):
# Generate an Index Name for Warehouse
index = "".join([
- index if index is not None else self._index,
+ self._index,
binascii.hexlify(os.urandom(4)),
])
| diff --git a/tests/search/test_indexes.py b/tests/search/test_indexes.py
--- a/tests/search/test_indexes.py
+++ b/tests/search/test_indexes.py
@@ -32,7 +32,7 @@ def test_reindex(self, monkeypatch):
monkeypatch.setattr(os, "urandom", urandom)
models = pretend.stub()
- config = pretend.stub(hosts=[], get=lambda *a: {})
+ config = pretend.stub(index="warehouse", hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
@@ -69,7 +69,7 @@ def test_reindex_no_alias(self, monkeypatch):
monkeypatch.setattr(os, "urandom", urandom)
models = pretend.stub()
- config = pretend.stub(hosts=[], get=lambda *a: {})
+ config = pretend.stub(index="warehouse", hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
@@ -101,7 +101,7 @@ def test_reindex_no_alias(self, monkeypatch):
def test_update_alias(self):
models = pretend.stub()
- config = pretend.stub(hosts=[], get=lambda *a: {})
+ config = pretend.stub(index="warehouse", hosts=[], get=lambda *a: {})
index = Index(models, config)
index.es = pretend.stub(
@@ -134,7 +134,7 @@ def test_update_alias(self):
def test_update_alias_no_old_index(self):
models = pretend.stub()
- config = pretend.stub(hosts=[], get=lambda *a: {})
+ config = pretend.stub(index="warehouse", hosts=[], get=lambda *a: {})
def _get_alias(idx):
raise TransportError(404, "Fake 404")
@@ -160,7 +160,7 @@ def _get_alias(idx):
def test_update_alias_exception(self):
models = pretend.stub()
- config = pretend.stub(hosts=[], get=lambda *a: {})
+ config = pretend.stub(index="warehouse", hosts=[], get=lambda *a: {})
def _get_alias(idx):
raise TransportError(500, "Fake 500")
diff --git a/tests/test_application.py b/tests/test_application.py
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -41,6 +41,7 @@ def test_basic_instantiation():
"url": "redis://localhost:6379/0"
},
"search": {
+ "index": "warehouse",
"hosts": [],
},
"logging": {
| Elasticsearch index name should be configurable.
This will allow us to use the same Elasticsearch infrastructure for TestPyPI as well as the real deal.
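Illustrative only: the shape of the `search` configuration section the updated tests expect, which is where a per-deployment index name would now live (the TestPyPI value is made up):

``` python
# The "search" section of the application config, as the updated tests build it;
# only the index name would differ between deployments.
config = {
    "search": {
        "index": "warehouse",   # e.g. "testwarehouse" for a TestPyPI deployment
        "hosts": [],
    },
}
```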
| 2014-03-02T22:52:11Z | [] | [] |
|
pypi/warehouse | 230 | pypi__warehouse-230 | [
"229"
] | fc584c48bc45373dfa69b55b7222069122ac0b49 | diff --git a/warehouse/legacy/xmlrpc.py b/warehouse/legacy/xmlrpc.py
--- a/warehouse/legacy/xmlrpc.py
+++ b/warehouse/legacy/xmlrpc.py
@@ -120,6 +120,7 @@ def release_urls(self, name, version):
has_sig=r['pgp_url'] is not None,
python_version=r['python_version'],
comment_text=r['comment_text'],
+ upload_time=r['upload_time'],
))
return l
| diff --git a/tests/legacy/test_xmlrpc.py b/tests/legacy/test_xmlrpc.py
--- a/tests/legacy/test_xmlrpc.py
+++ b/tests/legacy/test_xmlrpc.py
@@ -416,6 +416,8 @@ def test_xmlrpc_list_packages_with_serial():
@pytest.mark.parametrize("pgp", [True, False])
def test_release_urls(pgp, monkeypatch):
+ dt = datetime.datetime.utcnow()
+
downloads = [
dict(
name="spam",
@@ -430,6 +432,7 @@ def test_release_urls(pgp, monkeypatch):
pgp_url='/packages/source/t/spam/spam-1.0.tar.gz.sig'
if pgp else None,
comment_text='download for great justice',
+ upload_time=dt,
),
dict(
name="spam",
@@ -444,6 +447,7 @@ def test_release_urls(pgp, monkeypatch):
pgp_url='/packages/source/t/spam/spam-1.0.zip.sig'
if pgp else None,
comment_text=None,
+ upload_time=dt,
)
]
app = pretend.stub(
@@ -472,6 +476,7 @@ def test_release_urls(pgp, monkeypatch):
has_sig=pgp,
python_version="source",
comment_text='download for great justice',
+ upload_time=dt,
),
dict(
url='/packages/source/t/spam/spam-1.0.zip',
@@ -483,6 +488,7 @@ def test_release_urls(pgp, monkeypatch):
has_sig=pgp,
python_version="source",
comment_text=None,
+ upload_time=dt,
)
]
| JSON API GET results in a 500 error
https://pypi-preview.a.ssl.fastly.net/pypi/wget/json
As mentioned in #58, this API is ported and should work, but it looks like it is not covered by tests.
Expected: https://pypi.python.org/pypi/wget/json
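A quick manual check of the endpoint (illustrative; the hosts and project come straight from this issue, and `upload_time` is the field this change adds to each URL entry):

``` python
import json
from urllib.request import urlopen

url = "https://pypi-preview.a.ssl.fastly.net/pypi/wget/json"
with urlopen(url) as resp:
    data = json.load(resp)

print(sorted(data.keys()))                 # expect at least ['info', 'urls']
for entry in data["urls"]:
    print(entry.get("filename"), entry.get("upload_time"))
```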
| 2014-03-07T05:40:01Z | [] | [] |
|
pypi/warehouse | 242 | pypi__warehouse-242 | [
"228"
] | cbc37505dce66a35997d7fde8535712fc96af32a | diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -24,7 +24,7 @@
from warehouse.http import Response
from warehouse.legacy import xmlrpc
from warehouse.utils import (
- cache, fastly, is_valid_json_callback_name, render_response,
+ cache, cors, fastly, is_valid_json_callback_name, render_response,
)
@@ -49,6 +49,7 @@ def daytime(app, request):
return Response(response, mimetype="text/plain")
+@cors
@cache(browser=1, varnish=120)
@fastly("legacy-json", "legacy-json~{project_name!n}")
def project_json(app, request, project_name):
diff --git a/warehouse/utils.py b/warehouse/utils.py
--- a/warehouse/utils.py
+++ b/warehouse/utils.py
@@ -291,3 +291,17 @@ def camouflage_images(camo_url, camo_key, html):
tree_walker = html5lib.treewalkers.getTreeWalker("dom")
html_serializer = html5lib.serializer.htmlserializer.HTMLSerializer()
return "".join(html_serializer.serialize(tree_walker(dom)))
+
+
+def cors(fn):
+ @functools.wraps(fn)
+ def wrapper(app, request, *args, **kwargs):
+ # Get the response from the view
+ resp = fn(app, request, *args, **kwargs)
+
+ # Add our CORS headers
+ resp.headers["Access-Control-Allow-Origin"] = "*"
+
+ # Return the modified response
+ return resp
+ return wrapper
| diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -23,7 +23,7 @@
AttributeDict, FastlyFormatter, convert_to_attr_dict, merge_dict,
render_response, cache, get_wsgi_application, get_mimetype, redirect,
SearchPagination, is_valid_json_callback_name, generate_camouflage_url,
- camouflage_images,
+ camouflage_images, cors,
)
@@ -260,3 +260,14 @@ def test_generate_camouflage_url(camo_url, camo_key, url, expected):
])
def test_camouflage_images(camo_url, camo_key, html, expected):
assert camouflage_images(camo_url, camo_key, html) == expected
+
+
+def test_cors():
+ app = pretend.stub()
+ request = pretend.stub()
+ response = pretend.stub(headers={})
+
+ resp = cors(lambda *a, **kw: response)(app, request)
+
+ assert resp is response
+ assert resp.headers == {"Access-Control-Allow-Origin": "*"}
| Set `Access-Control-Allow-Origin: *` in JSON requests
To allow pure JS frontends to be hosted elsewhere.
| Test is here - http://jsfiddle.net/FhQ5a/1/ - open developer console to see result.
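The same check without a browser, as a rough sketch (the target URL is the preview host mentioned in the previous issue):

``` python
from urllib.request import urlopen

resp = urlopen("https://pypi-preview.a.ssl.fastly.net/pypi/wget/json")
# Absent before the change; "*" once the cors decorator is applied.
print(resp.headers.get("Access-Control-Allow-Origin"))
```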
| 2014-03-30T00:11:47Z | [] | [] |
pypi/warehouse | 272 | pypi__warehouse-272 | [
"256"
] | 777be54c639ef681c7e5b8e01cd2f61d9e027c14 | diff --git a/warehouse/application.py b/warehouse/application.py
--- a/warehouse/application.py
+++ b/warehouse/application.py
@@ -48,6 +48,7 @@
from warehouse.csrf import handle_csrf
from warehouse.datastructures import AttributeDict
from warehouse.http import Request
+from warehouse.middlewares import XForwardedTokenMiddleware
from warehouse.packaging import helpers as packaging_helpers
from warehouse.packaging.search import ProjectMapping
from warehouse.search.indexes import Index
@@ -205,6 +206,14 @@ def __init__(self, config, engine=None, redis_class=redis.StrictRedis):
],
)
+ # This is last because we want it processed first in the stack of
+ # middlewares. This will ensure that we strip X-Forwarded-* headers
+ # if the request doesn't come from Fastly
+ self.wsgi_app = XForwardedTokenMiddleware(
+ self.wsgi_app,
+ self.config.site.access_token,
+ )
+
def __call__(self, environ, start_response):
"""
Shortcut for :attr:`wsgi_app`.
diff --git a/warehouse/middlewares.py b/warehouse/middlewares.py
new file mode 100644
--- /dev/null
+++ b/warehouse/middlewares.py
@@ -0,0 +1,33 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import hmac
+
+
+class XForwardedTokenMiddleware:
+
+ header = "HTTP_X_WAREHOUSE_ACCESS_TOKEN"
+
+ def __init__(self, app, token):
+ self.app = app
+ self.token = token
+
+ def __call__(self, environ, start_response):
+ # Filter out X-Forwarded-* headers from the request if the secret token
+ # does not exist or does not match.
+ if not hmac.compare_digest(environ.pop(self.header, ""), self.token):
+ for key in set(environ.keys()):
+ if key.startswith("HTTP_X_FORWARDED_"):
+ del environ[key]
+
+ return self.app(environ, start_response)
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -231,7 +231,10 @@ def dbapp(database, engine):
return Warehouse.from_yaml(
override={
- "site": {"hosts": "localhost"},
+ "site": {
+ "access_token": "testing",
+ "hosts": "localhost",
+ },
"database": {"url": database},
"redis": {
"downloads": "redis://nonexistant/0",
@@ -257,7 +260,10 @@ def connect():
return Warehouse.from_yaml(
override={
- "site": {"hosts": "localhost"},
+ "site": {
+ "access_token": "testing",
+ "hosts": "localhost",
+ },
"database": {"url": "postgresql:///nonexistant"},
"redis": {
"downloads": "redis://nonexistant/0",
diff --git a/tests/test_application.py b/tests/test_application.py
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -32,6 +32,9 @@
def test_basic_instantiation():
Warehouse({
"debug": False,
+ "site": {
+ "access_token": "testing",
+ },
"database": {
"url": "postgres:///test_warehouse",
},
diff --git a/tests/test_config.yml b/tests/test_config.yml
--- a/tests/test_config.yml
+++ b/tests/test_config.yml
@@ -1,3 +1,6 @@
+site:
+ access_token: testing
+
assets:
directory: "static"
diff --git a/tests/test_middlewares.py b/tests/test_middlewares.py
new file mode 100644
--- /dev/null
+++ b/tests/test_middlewares.py
@@ -0,0 +1,58 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse.middlewares import XForwardedTokenMiddleware
+
+
+def test_xforwardedtokenmiddleware_valid():
+ response = pretend.stub()
+ start_response = pretend.stub()
+ app = pretend.call_recorder(lambda environ, start_response: response)
+
+ middleware = XForwardedTokenMiddleware(app, "1234")
+ resp = middleware(
+ {
+ "HTTP_X_WAREHOUSE_ACCESS_TOKEN": "1234",
+ "HTTP_X_FORWARDED_FOR": "192.168.1.1",
+ },
+ start_response,
+ )
+
+ assert resp is response
+ assert app.calls == [
+ pretend.call(
+ {"HTTP_X_FORWARDED_FOR": "192.168.1.1"},
+ start_response,
+ ),
+ ]
+
+
+def test_xforwardedtokenmiddleware_invalid():
+ response = pretend.stub()
+ start_response = pretend.stub()
+ app = pretend.call_recorder(lambda environ, start_response: response)
+
+ middleware = XForwardedTokenMiddleware(app, "1234")
+ resp = middleware(
+ {
+ "HTTP_X_WAREHOUSE_ACCESS_TOKEN": "invalid",
+ "HTTP_X_FORWARDED_FOR": "192.168.1.1",
+ },
+ start_response,
+ )
+
+ assert resp is response
+ assert app.calls == [pretend.call({}, start_response)]
| Only trust X-Forwarded-* headers when they come from Fastly
Split out from #248 because it's not exactly related to that.
We need a way to ensure that the `X-Forwarded-*` headers are only trusted when they come from Fastly. Additionally we need to ensure that Fastly is indeed actually setting them and stripping from incoming requests.
| On #248 @ewdurbin said:
> short convo with @dstufft on IRC lead to two options on trusting specific headers.
> - maintain a whitelist of upstream Fastly cache nodes
> - generate HTTPS Client Certs for Fastly nodes
>
> HTTPS Client Certs leave us less need for manually or regularly updating an IP white list. (At this time > Fastly distributes this information via mailing list updates)
>
> Generating these certs could occur on a per backend basis, one cert for Fastly and multiple for developer and infrastructure use.
>
> Responsibility for stripping unauthorized headers could fall on either nginx or the warehouse application.
>
> nginx can be used to verify the client certificate and either set an nginx boolean or a header (with a header we would need to strip it beforehand).
@mattrobenolt said on irc:
```
[00:59:13] <mattrobenolt> dstufft: We do the Fastly <-> Disqus verification with a secret header.
[00:59:48] <mattrobenolt> So we set a Fastly header when it sends the request to the backend. Just some random key. Then our backends reject anything that doesn't have that key.
```
Specifically, in vcl we do:
```
sub vcl_recv {
set req.http.X-Forwarded-For = req.http.Fastly-Client-IP;
set req.http.X-Something-Something = "foo";
}
```
Then in haproxy:
```
acl via_fastly hdr(X-Something-Something) foo
block unless via_fastly
```
Using a secret header seems to be the easiest method to me.
secret header sounds straightforward and easy to manage.
@dstufft where in the stack would you like header validation to exist and how should non-cdn traffic be handled?
@ewdurbin If we can make Nginx strip X-Forwarded-\* headers unless that secret header exists that would be easiest. If that's hard to do (or bad to do) I can do it in a WSGI middleware too. It doesn't matter to me. I don't mind if non cdn traffic can still hit the backend directly (makes it easy for us to test things) we should just strip the headers when it's non CDN.
Can you use this to solve the problems? http://wiki.nginx.org/HttpRealipModule We do this to keep the IP address from the first hop outside datacenter. So using `set_real_ip_from`. I bet you can mix and match to make magic.
In other words, it's easy for me to do it in Warehouse, but I have a slight preference to not do it in Warehouse if it's not hard.
Actually, if you just set `set_real_ip_from` to Fastly's IPs as well as your internal subnet, everything should be fine, unless it's possible to spoof the connecting IP address to nginx, and afaik, that's not possible.
in the interest of not maintaining a whitelist for `set_real_ip_from` i'd push for warehouse to have a configurable secret header.
ultimately this pushes the responsibility for verifying the header and stripping the appropriate headers to middleware in the warehouse project.
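For completeness, a minimal sketch of how the middleware from the patch above behaves when a request bypasses Fastly (the token and address are made up):

``` python
from warehouse.middlewares import XForwardedTokenMiddleware

seen = {}

def fake_app(environ, start_response):
    # Record what the wrapped application would actually see.
    seen.update(environ)
    return []

mw = XForwardedTokenMiddleware(fake_app, "not-the-real-token")

# A request that did not come through Fastly: no access token, so the spoofed
# X-Forwarded-* header is stripped before the app sees it.
mw({"HTTP_X_FORWARDED_FOR": "203.0.113.7"}, lambda *args: None)
print("HTTP_X_FORWARDED_FOR" in seen)   # False
```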
| 2014-04-14T17:22:23Z | [] | [] |
pypi/warehouse | 273 | pypi__warehouse-273 | [
"268"
] | 3fe9cf1544b885686142baf9b19742356005f742 | diff --git a/warehouse/accounts/db.py b/warehouse/accounts/db.py
--- a/warehouse/accounts/db.py
+++ b/warehouse/accounts/db.py
@@ -40,13 +40,12 @@ def get_user(self, name):
LIMIT 1
"""
- with self.engine.connect() as conn:
- result = conn.execute(query, username=name).first()
+ result = self.engine.execute(query, username=name).first()
- if result is not None:
- result = dict(result)
+ if result is not None:
+ result = dict(result)
- return result
+ return result
def user_authenticate(self, username, password):
# Get the user with the given username
@@ -57,8 +56,9 @@ def user_authenticate(self, username, password):
LIMIT 1
"""
- with self.engine.begin() as conn:
- password_hash = conn.execute(query, username=username).scalar()
+ with self.engine.begin():
+ password_hash = self.engine.execute(query, username=username).\
+ scalar()
# If the user was not found, then return None
if password_hash is None:
@@ -79,7 +79,7 @@ def user_authenticate(self, username, password):
if valid:
if new_hash:
- conn.execute(
+ self.engine.execute(
""" UPDATE accounts_user
SET password = %(password)s
WHERE username = %(username)s
diff --git a/warehouse/db.py b/warehouse/db.py
--- a/warehouse/db.py
+++ b/warehouse/db.py
@@ -33,13 +33,12 @@ def scalar(query, default=None):
the database and return a scalar.
"""
def inner(model, *args, **kwargs):
- with model.engine.connect() as conn:
- val = conn.execute(query, *args, **kwargs).scalar()
+ val = model.engine.execute(query, *args, **kwargs).scalar()
- if default is not None and val is None:
- return default
- else:
- return val
+ if default is not None and val is None:
+ return default
+ else:
+ return val
return inner
@@ -50,8 +49,8 @@ def rows(query, row_func=dict):
the database and return a list of rows with the row_func applied to each.
"""
def inner(model, *args, **kwargs):
- with model.engine.connect() as conn:
- return [row_func(r) for r in conn.execute(query, *args, **kwargs)]
+ return [row_func(r) for r in
+ model.engine.execute(query, *args, **kwargs)]
return inner
@@ -62,10 +61,9 @@ def mapping(query, key_func=lambda r: r[0], value_func=lambda r: r[1]):
created a mapping that maps each row to a key: value pair.
"""
def inner(model, *args, **kwargs):
- with model.engine.connect() as conn:
- return {
- key_func(r): value_func(r)
- for r in conn.execute(query, *args, **kwargs)
- }
+ return {
+ key_func(r): value_func(r)
+ for r in model.engine.execute(query, *args, **kwargs)
+ }
return inner
diff --git a/warehouse/packaging/db.py b/warehouse/packaging/db.py
--- a/warehouse/packaging/db.py
+++ b/warehouse/packaging/db.py
@@ -107,8 +107,7 @@ def get_top_projects(self, num=None):
if num:
query += "LIMIT %(limit)s"
- with self.engine.connect() as conn:
- return [tuple(r) for r in conn.execute(query, limit=num)]
+ return [tuple(r) for r in self.engine.execute(query, limit=num)]
get_project = db.scalar(
""" SELECT name
@@ -250,38 +249,37 @@ def get_downloads(self, project, version):
"""
results = []
- with self.engine.connect() as conn:
- for r in conn.execute(query, project=project, version=version):
- result = dict(r)
- result["filepath"] = os.path.join(
- self.app.config.paths.packages,
- result["python_version"],
- result["name"][0],
- result["name"],
- result["filename"],
- )
- if not os.path.exists(result["filepath"]):
- log.error(
- "%s missing for package %s %s",
- result["filepath"],
- result["name"],
- result["version"])
- continue
- result["url"] = "/".join([
- "/packages",
- result["python_version"],
- result["name"][0],
+ for r in self.engine.execute(query, project=project, version=version):
+ result = dict(r)
+ result["filepath"] = os.path.join(
+ self.app.config.paths.packages,
+ result["python_version"],
+ result["name"][0],
+ result["name"],
+ result["filename"],
+ )
+ if not os.path.exists(result["filepath"]):
+ log.error(
+ "%s missing for package %s %s",
+ result["filepath"],
result["name"],
- result["filename"],
- ])
- result["size"] = os.path.getsize(result["filepath"])
-
- if os.path.exists(result["filepath"] + ".asc"):
- result["pgp_url"] = result["url"] + ".asc"
- else:
- result["pgp_url"] = None
-
- results.append(result)
+ result["version"])
+ continue
+ result["url"] = "/".join([
+ "/packages",
+ result["python_version"],
+ result["name"][0],
+ result["name"],
+ result["filename"],
+ ])
+ result["size"] = os.path.getsize(result["filepath"])
+
+ if os.path.exists(result["filepath"] + ".asc"):
+ result["pgp_url"] = result["url"] + ".asc"
+ else:
+ result["pgp_url"] = None
+
+ results.append(result)
return results
@@ -297,11 +295,11 @@ def get_release(self, project, version):
LIMIT 1
"""
- with self.engine.connect() as conn:
- result = [
- dict(r)
- for r in conn.execute(query, project=project, version=version)
- ][0]
+ result = [
+ dict(r)
+ for r in self.engine.execute(query, project=project,
+ version=version)
+ ][0]
# Load dependency information
query = \
@@ -312,23 +310,22 @@ def get_release(self, project, version):
"""
dependency_data = {}
- with self.engine.connect() as conn:
- for dependency in conn.execute(
- query,
- project=project,
- version=version):
- kind = ReleaseDependencyKind(dependency["kind"])
-
- if kind in {
- ReleaseDependencyKind.requires_dist,
- ReleaseDependencyKind.provides_dist,
- ReleaseDependencyKind.obsoletes_dist}:
- value = dependency_data.setdefault(kind.name, [])
- value.append(dependency["specifier"])
-
- if kind is ReleaseDependencyKind.project_url:
- value = dependency_data.setdefault(kind.name, {})
- value.update(dict([dependency["specifier"].split(",", 1)]))
+ for dependency in self.engine.execute(
+ query,
+ project=project,
+ version=version):
+ kind = ReleaseDependencyKind(dependency["kind"])
+
+ if kind in {
+ ReleaseDependencyKind.requires_dist,
+ ReleaseDependencyKind.provides_dist,
+ ReleaseDependencyKind.obsoletes_dist}:
+ value = dependency_data.setdefault(kind.name, [])
+ value.append(dependency["specifier"])
+
+ if kind is ReleaseDependencyKind.project_url:
+ value = dependency_data.setdefault(kind.name, {})
+ value.update(dict([dependency["specifier"].split(",", 1)]))
result.update(dependency_data)
return result
@@ -434,11 +431,10 @@ def get_classifier_ids(self, classifiers):
WHERE classifier IN %(classifiers)s
"""
- with self.engine.connect() as conn:
- return {
- r["classifier"]: r["id"]
- for r in conn.execute(query, classifiers=tuple(classifiers))
- }
+ return {
+ r["classifier"]: r["id"]
+ for r in self.engine.execute(query, classifiers=tuple(classifiers))
+ }
def search_by_classifier(self, selected_classifiers):
# Note: selected_classifiers is a list of ids from trove_classifiers
@@ -448,16 +444,15 @@ def search_by_classifier(self, selected_classifiers):
# generate trove id -> level mapping
trove = {}
query = "SELECT * FROM trove_classifiers"
- with self.engine.connect() as conn:
- for id, classifier, l2, l3, l4, l5 in conn.execute(query):
- if id == l2:
- trove[id] = 2
- elif id == l3:
- trove[id] = 3
- elif id == l4:
- trove[id] = 4
- else:
- trove[id] = 5
+ for id, classifier, l2, l3, l4, l5 in self.engine.execute(query):
+ if id == l2:
+ trove[id] = 2
+ elif id == l3:
+ trove[id] = 3
+ elif id == l4:
+ trove[id] = 4
+ else:
+ trove[id] = 5
# compute a statement to produce all packages selected
query = "SELECT name, version FROM releases"
@@ -473,9 +468,8 @@ def search_by_classifier(self, selected_classifiers):
""" % (query, level, c)
releases = []
- with self.engine.connect() as conn:
- for name, version in conn.execute(query):
- releases.append((name, version))
+ for name, version in self.engine.execute(query):
+ releases.append((name, version))
return releases
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -11,209 +11,71 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-import shutil
-import signal
-import socket
import subprocess
-import tempfile
-import time
-import urllib.parse
-
-import pretend
-import pytest
+import os
+from sqlalchemy.engine import create_engine
+from sqlalchemy.pool import AssertionPool
import alembic.config
import alembic.command
-import psycopg2
-import psycopg2.extensions
-import sqlalchemy
-import sqlalchemy.pool
+import pretend
+import pytest
def pytest_collection_modifyitems(items):
for item in items:
# Mark any item with one of the database fixture as using the db
- if set(getattr(item, "funcargnames", [])) & {"postgresql", "database"}:
+ if set(getattr(item, "funcargnames", [])) & {"engine", "database"}:
item.add_marker(pytest.mark.db)
-def pytest_addoption(parser):
- group = parser.getgroup("warehouse")
- group.addoption(
- "--database-url",
- default=None,
- help="An url to an already created warehouse test database",
- )
-
-
-def _get_open_port():
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.bind(("", 0))
- s.listen(1)
-
- port = s.getsockname()[1]
-
- s.close()
-
- return port
-
-
-@pytest.fixture(scope="session")
-def postgresql(request):
- # First check to see if we've been given a database url that we should use
- # instead
- database_url = (
- os.environ.get("WAREHOUSE_DATABASE_URL")
- or request.config.getoption("--database-url")
- )
-
- if database_url is not None:
- return database_url
-
- # Get an open port to use for our PostgreSQL server
- port = _get_open_port()
-
- # Create a temporary directory to use as our data directory
- tmpdir = tempfile.mkdtemp()
-
- # Initial a database in our temporary directory
- subprocess.check_call(
- ["initdb", "-D", tmpdir],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- )
-
- proc = subprocess.Popen(
- ["postgres", "-D", tmpdir, "-p", str(port),
- "-h", "127.0.0.1", "-k", tmpdir],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- )
-
- # Register a finalizer that will kill the started PostgreSQL server
- @request.addfinalizer
- def finalize():
- # Terminate the PostgreSQL process
- proc.send_signal(signal.SIGINT)
- proc.wait()
-
- # Remove the data directory
- shutil.rmtree(tmpdir, ignore_errors=True)
-
- for _ in range(5):
- try:
- conn = psycopg2.connect(
- database="postgres",
- host="localhost",
- port=port,
- connect_timeout=10,
- )
- except psycopg2.OperationalError:
- # Pause for a moment to give postgresql time to start
- time.sleep(1)
- else:
- # Set our isolation level
- conn.set_isolation_level(
- psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
- )
-
- # Create a database for the warehouse tests
- cursor = conn.cursor()
- cursor.execute("CREATE DATABASE warehouse ENCODING 'UTF8'")
-
- # Commit our changes and close the connection
- cursor.close()
- conn.close()
+@pytest.fixture(scope='session')
+def database(request):
+ """Creates the warehouse_unittest database, builds the schema and returns
+ an SQLALchemy Connection to the database.
+ """
- break
+ if os.getenv('WAREHOUSE_DATABASE_URL'):
+ # Assume that the database was externally created
+ url = os.getenv('WAREHOUSE_DATABASE_URL')
else:
- raise RuntimeError("Could not start a PostgreSQL instance")
-
- return "postgresql://localhost:{}/warehouse".format(port)
-
-
-@pytest.fixture(scope="session")
-def database(postgresql):
- details = urllib.parse.urlparse(postgresql)
-
- # Ensure all extensions that we require are installed
- conn = psycopg2.connect(
- database=details.path[1:],
- host=details.hostname,
- port=details.port,
- )
- cursor = conn.cursor()
- cursor.execute("DROP SCHEMA public CASCADE")
- cursor.execute("CREATE SCHEMA public")
- cursor.execute("CREATE EXTENSION IF NOT EXISTS citext")
- cursor.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')
- conn.commit()
- cursor.close()
- conn.close()
-
+ # (Drop and) create the warehouse_unittest database with UTF-8 encoding
+ # (in case the default encoding was changed from UTF-8)
+ subprocess.call(['dropdb', 'warehouse_unittest'])
+ subprocess.check_call(['createdb', '-E', 'UTF8', 'warehouse_unittest'])
+ url = 'postgresql:///warehouse_unittest'
+
+ engine = create_engine(url, poolclass=AssertionPool)
+ request.addfinalizer(engine.dispose)
+ if not os.getenv('WAREHOUSE_DATABASE_URL'):
+ request.addfinalizer(lambda: subprocess.call(['dropdb', 'warehouse_unittest']))
+
+ # Connect to the database and create the necessary extensions
+ engine.execute('CREATE EXTENSION IF NOT EXISTS "citext"')
+ engine.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')
+
+ # Have Alembic create the schema
alembic_cfg = alembic.config.Config()
alembic_cfg.set_main_option(
"script_location",
"warehouse:migrations",
)
- alembic_cfg.set_main_option("url", postgresql)
+ alembic_cfg.set_main_option("url", url)
alembic.command.upgrade(alembic_cfg, "head")
- return postgresql
-
-
-class FakeConnection:
-
- def __init__(self, connection):
- self.connection = connection
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, value, traceback):
- pass
-
- def __getattr__(self, name):
- return getattr(self.connection, name)
-
-
-class FakeEngine:
-
- def __init__(self, connection):
- self.connection = connection
-
- def __getattr__(self, name):
- return getattr(self.connection, name)
-
- def begin(self):
- return FakeConnection(self.connection)
-
- def connect(self):
- return FakeConnection(self.connection)
+ return engine
@pytest.fixture
def engine(request, database):
- engine = sqlalchemy.create_engine(
- database,
- poolclass=sqlalchemy.pool.AssertionPool,
- isolation_level="SERIALIZABLE",
- )
-
- connection = engine.connect()
- transaction = connection.begin()
-
- @request.addfinalizer
- def finalize():
- transaction.rollback()
- connection.close()
-
- return FakeEngine(connection)
+ connection = database.connect()
+ transaction = connection.begin_nested()
+ request.addfinalizer(transaction.rollback)
+ request.addfinalizer(connection.close)
+ return connection
class ErrorRedis:
-
def __init__(self, url):
self.url = url
@@ -226,7 +88,7 @@ def __getattr__(self, name):
@pytest.fixture
-def dbapp(database, engine):
+def dbapp(engine):
from warehouse.application import Warehouse
return Warehouse.from_yaml(
@@ -235,7 +97,6 @@ def dbapp(database, engine):
"access_token": "testing",
"hosts": "localhost",
},
- "database": {"url": database},
"redis": {
"downloads": "redis://nonexistant/0",
"sessions": "redis://nonexistant/0",
@@ -256,8 +117,6 @@ def connect():
"Cannot access the database through the app fixture"
)
- engine = pretend.stub(connect=connect)
-
return Warehouse.from_yaml(
override={
"site": {
@@ -271,6 +130,6 @@ def connect():
},
"search": {"hosts": []},
},
- engine=engine,
+ engine=pretend.stub(connect=connect, execute=connect),
redis_class=ErrorRedis,
)
diff --git a/tests/test_db.py b/tests/test_db.py
--- a/tests/test_db.py
+++ b/tests/test_db.py
@@ -46,12 +46,7 @@ def test_scalar(value, default, expected, eargs, ekwargs):
result = pretend.stub(scalar=pretend.call_recorder(lambda: value))
execute = pretend.call_recorder(lambda q, *a, **kw: result)
model = pretend.stub(
- engine=pretend.stub(
- connect=lambda: pretend.stub(
- __enter__=lambda: pretend.stub(execute=execute),
- __exit__=lambda *a, **k: None,
- ),
- ),
+ engine=pretend.stub(execute=execute)
)
sql = db.scalar("SELECT * FROM thing", default=default)
@@ -77,13 +72,9 @@ def test_scalar(value, default, expected, eargs, ekwargs):
def test_rows(row_func, value, expected, eargs, ekwargs):
execute = pretend.call_recorder(lambda q, *a, **kw: value)
model = pretend.stub(
- engine=pretend.stub(
- connect=lambda: pretend.stub(
- __enter__=lambda: pretend.stub(execute=execute),
- __exit__=lambda *a, **k: None,
- ),
- ),
+ engine=pretend.stub(execute=execute)
)
+
kwargs = {"row_func": row_func} if row_func else {}
sql = db.rows("SELECT * FROM thing", **kwargs)
@@ -129,13 +120,9 @@ def test_rows(row_func, value, expected, eargs, ekwargs):
def test_mapping(key_func, value_func, value, expected, eargs, ekwargs):
execute = pretend.call_recorder(lambda q, *a, **kw: value)
model = pretend.stub(
- engine=pretend.stub(
- connect=lambda: pretend.stub(
- __enter__=lambda: pretend.stub(execute=execute),
- __exit__=lambda *a, **k: None,
- ),
- ),
+ engine=pretend.stub(execute=execute)
)
+
kwargs = {}
if key_func:
kwargs["key_func"] = key_func
| Simplify PostgreSQL test setup
The testing setup seems overly complicated. A whole new PostgreSQL instance is not needed for proper test isolation. The `FakeEngine` and `FakeConnection` classes also seem unnecessary, since I've never needed anything like them in my own testing setup. In addition, the ongoing Flaskification will likely need its own changes.
| For what it's worth, I made the `FakeEngine` and `FakeConnection` classes just recently because I was using transactions to isolate each test case from the others. However! In order to actually implement that I was creating a connection in the engine fixture, opening a transaction, and registering a finalizer that rolled the transaction back (much like what you see already). I would then return the connection, which dbapp thought was an engine and used as such. The problem turned out to be that connections look like they support the same things, but with one minor difference: if you reconnect (using `with realconnection.connect()`) you'll still be using the same underlying DBAPI connection, but it "loses" the fact that it's inside a transaction and will start auto-committing changes again, leaking state between test cases.
So the `FakeEngine` and `FakeConnection` were how I tricked it into not doing that. Maybe there's a better way; I'm not really a SQLAlchemy expert, but I just wanted to record _why_ I was doing that.
Don't worry, I know what I'm doing (most of the time anyway :)
| 2014-04-14T18:27:01Z | [] | [] |
pypi/warehouse | 286 | pypi__warehouse-286 | [
"269"
] | 3d7edd8a6a26e464d8d00aa5393cad824a604c92 | diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -26,12 +26,37 @@
cache, cors, is_valid_json_callback_name, render_response,
)
+_action_methods = {}
+
+
+def register(name):
+ """Register a handler for a legacy :action style dispatch.
+
+ Most of the dispatch in legacy PyPI was implemented using a :action
+ parameter in the GET or POST arguments.
+
+ This doesn't actually decorate the function or alter it in any way, it
+ simply registers it with the legacy routing mapping.
+ """
+ if name in _action_methods:
+ raise KeyError('Attempt to re-register name %r' % (name, ))
+
+ def deco(fn):
+ _action_methods[name] = fn
+ return fn
+ return deco
+
def pypi(app, request):
# if the MIME type of the request is XML then we go into XML-RPC mode
if request.headers.get('Content-Type') == 'text/xml':
return xmlrpc.handle_request(app, request)
+ # check for the legacy :action-style dispatch
+ action = request.args.get(':action')
+ if action in _action_methods:
+ return _action_methods[action](app, request)
+
# no XML-RPC and no :action means we render the index, or at least we
# redirect to where it moved to
return redirect(
@@ -90,6 +115,7 @@ def project_json(app, request, project_name):
return response
+@register('rss')
@cache(browser=1, varnish=120)
@fastly.rss
def rss(app, request):
@@ -97,9 +123,10 @@ def rss(app, request):
"""
releases = app.db.packaging.get_recently_updated(num=40)
for release in releases:
- values = dict(project_name=release['name'], version=release['version'])
- url = app.urls.build('warehouse.packaging.views.project_detail',
- values, force_external=True)
+ # TODO update _force_external to _external when Flask-ification is done
+ url = url_for(request, 'warehouse.packaging.views.project_detail',
+ project_name=release['name'], version=release['version'],
+ _force_external=True)
release.update(dict(url=url))
response = render_response(
@@ -113,6 +140,7 @@ def rss(app, request):
return response
+@register('packages_rss')
@cache(browser=1, varnish=120)
@fastly.rss
def packages_rss(app, request):
@@ -120,9 +148,9 @@ def packages_rss(app, request):
"""
releases = app.db.packaging.get_recent_projects(num=40)
for release in releases:
- values = dict(project_name=release['name'])
- url = app.urls.build('warehouse.packaging.views.project_detail',
- values, force_external=True)
+ # TODO update _force_external to _external when Flask-ification is done
+ url = url_for(request, 'warehouse.packaging.views.project_detail',
+ project_name=release['name'], _force_external=True)
release.update(dict(url=url))
response = render_response(
| diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -36,6 +36,7 @@ def test_pypi_index(content_type):
app = pretend.stub()
request = pretend.stub(
+ args={},
headers=headers,
url_adapter=pretend.stub(
build=pretend.call_recorder(
@@ -58,9 +59,39 @@ def test_pypi_index(content_type):
]
+def test_pypi_route_action(monkeypatch):
+ app = pretend.stub()
+ request = pretend.stub(
+ args={':action': 'test'},
+ headers={},
+ )
+
+ _action_methods = {}
+ monkeypatch.setattr(pypi, '_action_methods', _action_methods)
+
+ @pypi.register('test')
+ def test(app, request):
+ test.called = True
+ return 'success'
+
+ resp = pypi.pypi(app, request)
+
+ assert resp == 'success'
+ assert test.called
+
+
+def test_pypi_route_action_double(monkeypatch):
+ _action_methods = {'test': None}
+ monkeypatch.setattr(pypi, '_action_methods', _action_methods)
+
+ with pytest.raises(KeyError):
+ pypi.register('test')
+
+
def test_pypi_route_xmlrpc(monkeypatch):
app = pretend.stub()
request = pretend.stub(
+ args={},
headers={'Content-Type': 'text/xml'},
)
@@ -175,12 +206,13 @@ def test_rss(monkeypatch):
cache=pretend.stub(browser=False, varnish=False),
site={"url": "http://test.server/", "name": "PyPI"},
),
- urls=Map(urls.urls).bind('test.server', '/'),
templates=pretend.stub(
get_template=pretend.call_recorder(lambda t: template),
),
)
- request = pretend.stub()
+ request = pretend.stub(
+ url_adapter=Map(urls.urls).bind('test.server', '/'),
+ )
resp = pypi.rss(app, request)
@@ -228,12 +260,13 @@ def test_packages_rss(monkeypatch):
cache=pretend.stub(browser=False, varnish=False),
site={"url": "http://test.server/", "name": "PyPI"},
),
- urls=Map(urls.urls).bind('test.server', '/'),
templates=pretend.stub(
get_template=pretend.call_recorder(lambda t: template),
),
)
- request = pretend.stub()
+ request = pretend.stub(
+ url_adapter=Map(urls.urls).bind('test.server', '/'),
+ )
resp = pypi.packages_rss(app, request)
| Legacy RSS is not in URL dispatch
This is a general issue: the old PyPI `?:action=<name>` methods are not hooked up at the moment. I'll add a dispatch table to warehouse.legacy.pypi and use that to register the rss (and other) methods.
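A sketch of what registering against that dispatch table looks like ("new_action" is a made-up name; the patch itself registers "rss" and "packages_rss"):

``` python
from warehouse.legacy.pypi import register

@register("new_action")          # made-up action name for illustration
def new_action(app, request):
    # reached for GET/POST requests to /pypi?:action=new_action
    ...
```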
| 2014-04-15T15:32:52Z | [] | [] |
|
pypi/warehouse | 289 | pypi__warehouse-289 | [
"122"
] | ebc4bab81657de26488de7400db7ac508a9e7870 | diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -92,11 +92,24 @@ def project_detail(app, request, project_name, version=None):
# Mark our description_html as safe as it's already been cleaned by bleach
description_html = jinja2.Markup(description_html)
+ # Split the requirement string to (project name, the rest)
+ requirements = []
+ for req in release.get('requires_dist', []):
+ project_name, *other = req.split(' ', 1)
+ url = url_for(request, 'warehouse.packaging.views.project_detail',
+ project_name=project_name)
+ requirements.append({
+ 'project_name': project_name,
+ 'project_url': url,
+ 'other': other[0] if other else ''
+ })
+
return render_response(
app, request, "projects/detail.html",
project=project,
release=release,
releases=releases,
+ requirements=requirements,
description_html=description_html,
download_counts=app.db.packaging.get_download_counts(project),
downloads=app.db.packaging.get_downloads(project, version),
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -224,6 +224,7 @@ def test_project_detail_invalid_version():
def test_project_detail_valid(version, description, camo):
release = {
"description": description,
+ "requires_dist": ["foo", "xyz > 0.1"]
}
template = pretend.stub(
@@ -269,7 +270,10 @@ def test_project_detail_valid(version, description, camo):
get_template=pretend.call_recorder(lambda t: template),
),
)
- request = pretend.stub()
+ request = pretend.stub(
+ url_adapter=pretend.stub(build=lambda *a,
+ **kw: "/projects/test-project/")
+ )
project_name = "test-project"
normalized = "test-project"
| Link dependency names to the actual dependency
When viewing dependencies on the project page, each name should be a link that takes you to that dependency's project page.
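The linking hinges on splitting each `requires_dist` entry into a project name and the remaining specifier, as the patch does; a tiny standalone version:

``` python
def split_requirement(req):
    # "xyz > 0.1" -> ("xyz", "> 0.1"); "foo" -> ("foo", "")
    project_name, *other = req.split(" ", 1)
    return project_name, (other[0] if other else "")

print(split_requirement("foo"))        # ('foo', '')
print(split_requirement("xyz > 0.1"))  # ('xyz', '> 0.1')
```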
| 2014-04-15T17:27:12Z | [] | [] |
|
pypi/warehouse | 297 | pypi__warehouse-297 | [
"251"
] | 1573a579ddb05c7e1d9f8dbe9d23318f56968df6 | diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -76,7 +76,7 @@ def daytime(app, request):
@cors
@cache(browser=1, varnish=120)
@fastly.projects(project_name="project")
-def project_json(app, request, project_name):
+def project_json(app, request, project_name, version=None):
# fail early if callback is invalid
callback = request.args.get('callback')
if callback:
@@ -91,9 +91,12 @@ def project_json(app, request, project_name):
# we're looking for the latest version
versions = app.db.packaging.get_project_versions(project)
- if not versions:
- raise NotFound("{} has no releases".format(project))
- version = versions[0]
+ if version is None:
+ if not versions:
+ raise NotFound("{} has no releases".format(project_name))
+ version = versions[0]
+ elif version not in versions:
+ raise NotFound("{} has no release {}".format(project_name, version))
rpc = xmlrpc.Interface(app, request)
diff --git a/warehouse/legacy/urls.py b/warehouse/legacy/urls.py
--- a/warehouse/legacy/urls.py
+++ b/warehouse/legacy/urls.py
@@ -26,6 +26,8 @@
Rule("/pypi", methods=["GET", "POST"], endpoint="pypi"),
Rule("/pypi/<project_name>/json", methods=["GET"],
endpoint="project_json"),
+ Rule("/pypi/<project_name>/<version>/json", methods=["GET"],
+ endpoint="project_json"),
Rule("/daytime", methods=["GET"], endpoint="daytime"),
]),
]
| diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -120,8 +120,13 @@ def test_daytime(monkeypatch):
assert resp.response[0] == b'19700101T00:00:00\n'
-@pytest.mark.parametrize("callback", [None, 'yes'])
-def test_json(monkeypatch, callback):
+@pytest.mark.parametrize(("version", "callback"), [
+ (None, None),
+ (None, 'yes'),
+ ('1.0', 'yes'),
+ ('1.0', None),
+])
+def test_json(monkeypatch, version, callback):
get_project = pretend.call_recorder(lambda n: 'spam')
get_project_versions = pretend.call_recorder(lambda n: ['2.0', '1.0'])
app = pretend.stub(
@@ -149,12 +154,13 @@ def test_json(monkeypatch, callback):
monkeypatch.setattr(xmlrpc, 'Interface', Interface)
- resp = pypi.project_json(app, request, project_name='spam')
+ resp = pypi.project_json(app, request, project_name='spam',
+ version=version)
assert get_project.calls == [pretend.call('spam')]
assert get_project_versions.calls == [pretend.call('spam')]
- assert release_data.calls == [pretend.call('spam', '2.0')]
- assert release_urls.calls == [pretend.call('spam', '2.0')]
+ assert release_data.calls == [pretend.call('spam', version or '2.0')]
+ assert release_urls.calls == [pretend.call('spam', version or '2.0')]
expected = '{"info": {"some": "data"}, "urls": [{"some": "url", '\
'"upload_time": "1970-01-01T00:00:00"}]}'
if callback:
@@ -169,8 +175,12 @@ def test_jsonp_invalid():
pypi.project_json(app, request, project_name='spam')
-@pytest.mark.parametrize("project", [None, pretend.stub(name="spam")])
-def test_json_missing(monkeypatch, project):
+@pytest.mark.parametrize(("project", "version"), [
+ (None, None),
+ (pretend.stub(name="spam"), None),
+ (pretend.stub(name="spam"), '1'),
+])
+def test_json_missing(monkeypatch, project, version):
get_project = pretend.call_recorder(lambda n: project)
get_project_versions = pretend.call_recorder(lambda n: [])
app = pretend.stub(
@@ -184,7 +194,7 @@ def test_json_missing(monkeypatch, project):
request = pretend.stub(args={})
with pytest.raises(NotFound):
- pypi.project_json(app, request, project_name='spam')
+ pypi.project_json(app, request, project_name='spam', version=version)
def test_rss(monkeypatch):
| Legacy JSON doesn't have the version specific JSON endpoint
PyPI legacy supports version-specific JSON via URLs like `/pypi/Django/1.5.5/json`; however, Warehouse doesn't support this.
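A hedged example of the client-side behaviour this enables; the URL shape comes from this issue, and the `info`/`urls` keys mirror the structure asserted in the tests above:

```
import json
from urllib.request import urlopen

# Fetch metadata for one specific release via the legacy-style URL.
with urlopen("https://pypi.python.org/pypi/Django/1.5.5/json") as resp:
    data = json.loads(resp.read().decode("utf-8"))

print(data["info"])   # release metadata for exactly that version
print(data["urls"])   # download URLs for that release
```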
| 2014-04-16T17:33:19Z | [] | [] |
|
pypi/warehouse | 300 | pypi__warehouse-300 | [
"149"
] | c9871d51f3d68067bfd735dda4e1a4a359c39613 | diff --git a/warehouse/packaging/db.py b/warehouse/packaging/db.py
--- a/warehouse/packaging/db.py
+++ b/warehouse/packaging/db.py
@@ -83,6 +83,13 @@ class Database(db.Database):
"""
)
+ get_reverse_dependencies = db.rows(
+ """ SELECT DISTINCT name
+ FROM release_dependencies
+ WHERE specifier LIKE %s
+ """
+ )
+
get_changed_since = db.rows(
""" SELECT name, max(submitted_date) FROM journals
WHERE submitted_date > %s
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -74,6 +74,15 @@ def project_detail(app, request, project_name, version=None):
# Get the release data for the version
release = app.db.packaging.get_release(project, version)
+ # Get the number of reverse dependencies for this project
+ project_url = lambda name: url_for(
+ request, 'warehouse.packaging.views.project_detail',
+ project_name=name)
+ reverse_dependencies = [
+ {'name': row['name'], 'url': project_url(row['name'])}
+ for row in app.db.packaging.get_reverse_dependencies(project + ' %')
+ ]
+
if release.get("description"):
# Render the project description
description_html, rendered = readme.rst.render(release["description"])
@@ -111,6 +120,7 @@ def project_detail(app, request, project_name, version=None):
release=release,
releases=releases,
requirements=requirements,
+ reverse_dependencies=reverse_dependencies,
description_html=description_html,
download_counts=app.db.packaging.get_download_counts(project),
downloads=app.db.packaging.get_downloads(project, version),
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -271,6 +271,9 @@ def test_project_detail_valid(app, version, description, html, camo):
"last_month": 30,
},
),
+ get_reverse_dependencies=pretend.call_recorder(
+ lambda proj: [{'name': 'foo'}, {'name': 'bar'}]
+ ),
get_downloads=pretend.call_recorder(lambda proj, ver: []),
get_classifiers=pretend.call_recorder(lambda proj, ver: []),
get_documentation_url=pretend.call_recorder(
@@ -314,6 +317,10 @@ def test_project_detail_valid(app, version, description, html, camo):
"project": "test-project",
"release": release,
"releases": [{"version": "2.0"}, {"version": "1.0"}],
+ "reverse_dependencies": [
+ {'name': 'foo', 'url': '/projects/test-project/'},
+ {'name': 'bar', 'url': '/projects/test-project/'}
+ ],
"requirements": [
{
"project_name": "foo",
@@ -337,3 +344,6 @@ def test_project_detail_valid(app, version, description, html, camo):
assert app.db.packaging.get_users_for_project.calls == [
pretend.call("test-project"),
]
+ assert app.db.packaging.get_reverse_dependencies.calls == [
+ pretend.call("test-project %"),
+ ]
| Show reverse dependencies on the package page
When using wheel and twine to upload a package, PyPI and Warehouse will have information about the dependencies of said package. It would be very nice to be able to look up the reverse relationship, the dependents, on the package's listing in Warehouse.
| Talked about this with Richard, agreed to add a reverse dependency counter to the project details page along with a link to a dedicated page that lists the reverse dependencies.
Sounds reasonable to me. Another option is a modal, like the one the hash functions use.
Sounds interesting, I'll have a look at it.
There's a catch here too. The project details page is specific to one version of the project, so the reverse dependencies' specifiers should be respected. At least eventually.
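A rough sketch of the version-aware filtering that last point implies; `specifier_matches` is a hypothetical helper, and the stored value is assumed to be a requirement string such as "fooproject >=1.0,<2.0" as saved in release_dependencies:

```
from pkg_resources import Requirement


def specifier_matches(specifier, version):
    """Hypothetical helper: does `version` satisfy a stored specifier?"""
    try:
        return version in Requirement.parse(specifier)
    except ValueError:
        # Unparseable specifiers are treated as matching everything.
        return True


print(specifier_matches("fooproject >=1.0,<2.0", "1.5"))  # True
print(specifier_matches("fooproject >=1.0,<2.0", "2.1"))  # False
```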
| 2014-04-16T23:49:01Z | [] | [] |
pypi/warehouse | 312 | pypi__warehouse-312 | [
"299"
] | 134bcfbf1fce2264a3ebb3d4ebe16b0a1bc3102a | diff --git a/warehouse/db.py b/warehouse/db.py
--- a/warehouse/db.py
+++ b/warehouse/db.py
@@ -43,6 +43,22 @@ def inner(model, *args, **kwargs):
return inner
+def first(query, default=None):
+ """
+ A helper function that takes a query and returns a function that will query
+ the database and return the first row
+ """
+ def inner(model, *args, **kwargs):
+ val = model.engine.execute(query, *args, **kwargs).first()
+
+ if default is not None and val is None:
+ return default
+ else:
+ return val
+
+ return inner
+
+
def rows(query, row_func=dict):
"""
A helper function that takes a query and returns a function that will query
@@ -67,3 +83,21 @@ def inner(model, *args, **kwargs):
}
return inner
+
+
+def validate_argument_column_mapping(argument_dict, table,
+ blacklist=None):
+ """
+ Validate that the keys of the argument_dict passed match columns in table
+ that are not in the blacklist list.
+
+ return TypeError if there is a key where this condition is not met.
+ """
+ if blacklist is None:
+ blacklist = []
+ columns = set((c.key for c in table.columns if c.key not in blacklist))
+ for argument_name in argument_dict:
+ if argument_name not in columns:
+ raise TypeError("Key {0} does not match a column in {1}".format(
+ argument_name, table.name
+ ))
diff --git a/warehouse/fastly.py b/warehouse/fastly.py
--- a/warehouse/fastly.py
+++ b/warehouse/fastly.py
@@ -14,14 +14,14 @@
import functools
import string
-from warehouse.utils import normalize
+from warehouse.utils import normalize_project_name
class FastlyFormatter(string.Formatter):
def convert_field(self, value, conversion):
if conversion == "n":
- return normalize(value)
+ return normalize_project_name(value)
return super(FastlyFormatter, self).convert_field(value, conversion)
diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -90,7 +90,7 @@ def project_json(app, request, project_name, version=None):
raise NotFound("{} does not exist".format(project_name))
# we're looking for the latest version
- versions = app.db.packaging.get_project_versions(project)
+ versions = app.db.packaging.get_project_versions(project['name'])
if version is None:
if not versions:
raise NotFound("{} has no releases".format(project_name))
@@ -101,9 +101,9 @@ def project_json(app, request, project_name, version=None):
rpc = xmlrpc.Interface(app, request)
d = dict(
- info=rpc.release_data(project, version),
- urls=rpc.release_urls(project, version),
- releases=rpc.all_release_urls(project),
+ info=rpc.release_data(project['name'], version),
+ urls=rpc.release_urls(project['name'], version),
+ releases=rpc.all_release_urls(project['name']),
)
time_format = '%Y-%m-%dT%H:%M:%S'
for url in d['urls']:
diff --git a/warehouse/legacy/simple.py b/warehouse/legacy/simple.py
--- a/warehouse/legacy/simple.py
+++ b/warehouse/legacy/simple.py
@@ -50,10 +50,10 @@ def project(app, request, project_name):
raise NotFound("{} does not exist".format(project_name))
# Generate the Package URLs for the packages we've hosted
- file_urls = app.db.packaging.get_file_urls(project)
+ file_urls = app.db.packaging.get_file_urls(project['name'])
# Determine what the hosting mode is for this package
- hosting_mode = app.db.packaging.get_hosting_mode(project)
+ hosting_mode = app.db.packaging.get_hosting_mode(project['name'])
project_urls = []
if hosting_mode in {"pypi-scrape-crawl", "pypi-scrape"}:
@@ -62,7 +62,7 @@ def project(app, request, project_name):
download_rel = "{}download".format(rel_prefix)
# Generate the Homepage and Download URL links
- release_urls = app.db.packaging.get_release_urls(project)
+ release_urls = app.db.packaging.get_release_urls(project['name'])
for version, (home_page, download_url) in release_urls.items():
if home_page and home_page != "UNKNOWN":
project_urls.append({
@@ -79,25 +79,25 @@ def project(app, request, project_name):
})
# Fetch the explicitly provided URLs
- external_urls = app.db.packaging.get_external_urls(project)
+ external_urls = app.db.packaging.get_external_urls(project['name'])
resp = render_response(
app, request,
"legacy/simple/detail.html",
- project=project,
+ project=project['name'],
files=file_urls,
project_urls=project_urls,
external_urls=external_urls,
)
# Add a header that points to the last serial
- serial = app.db.packaging.get_last_serial(project)
+ serial = app.db.packaging.get_last_serial(project['name'])
resp.headers.add("X-PyPI-Last-Serial", serial)
# Add a Link header to point at the canonical URL
can_url = url_for(
request, "warehouse.legacy.simple.project",
- project_name=project,
+ project_name=project['name'],
_force_external=True,
)
resp.headers.add("Link", "<" + can_url + ">", rel="canonical")
diff --git a/warehouse/packaging/db.py b/warehouse/packaging/db.py
--- a/warehouse/packaging/db.py
+++ b/warehouse/packaging/db.py
@@ -16,9 +16,16 @@
import logging
import os.path
import urllib.parse
+import pkg_resources
+import readme.rst
+
+from collections import defaultdict
from warehouse import db
-from warehouse.packaging.tables import ReleaseDependencyKind
+from warehouse import utils
+from warehouse.packaging.tables import (ReleaseDependencyKind,
+ packages,
+ releases)
log = logging.getLogger(__name__)
@@ -83,13 +90,6 @@ class Database(db.Database):
"""
)
- get_reverse_dependencies = db.rows(
- """ SELECT DISTINCT name
- FROM release_dependencies
- WHERE specifier LIKE %s
- """
- )
-
get_changed_since = db.rows(
""" SELECT name, max(submitted_date) FROM journals
WHERE submitted_date > %s
@@ -116,8 +116,8 @@ def get_top_projects(self, num=None):
return [tuple(r) for r in self.engine.execute(query, limit=num)]
- get_project = db.scalar(
- """ SELECT name
+ get_project = db.first(
+ """ SELECT *
FROM packages
WHERE normalized_name = lower(
regexp_replace(%s, '_', '-', 'ig')
@@ -186,6 +186,19 @@ def get_top_projects(self, num=None):
value_func=lambda r: (r["home_page"], r["download_url"]),
)
+ def get_release_dependencies(self, project_name, version):
+ query = \
+ """
+ SELECT * FROM release_dependencies
+ WHERE name = %(name)s
+ AND version = %(version)s
+ """
+ specifier_dict = defaultdict(set)
+ for row in self.engine.execute(query, name=project_name,
+ version=version):
+ specifier_dict[row['kind']].add(row['specifier'])
+ return specifier_dict
+
get_external_urls = db.rows(
""" SELECT DISTINCT ON (url) url
FROM description_urls
@@ -195,6 +208,16 @@ def get_top_projects(self, num=None):
row_func=lambda r: r["url"]
)
+ get_release_external_urls = db.rows(
+ """ SELECT DISTINCT ON (url) url
+ FROM description_urls
+ WHERE name = %s
+ AND version = %s
+ ORDER BY url
+ """,
+ row_func=lambda r: r["url"]
+ )
+
get_file_urls = db.rows(
""" SELECT name, filename, python_version, md5_digest
FROM release_files
@@ -306,7 +329,11 @@ def get_release(self, project, version):
dict(r)
for r in self.engine.execute(query, project=project,
version=version)
- ][0]
+ ]
+ if len(result) == 0:
+ return None
+ else:
+ result = result[0]
# Load dependency information
query = \
@@ -515,3 +542,295 @@ def get_documentation_url(self, project):
ORDER BY submitted_date DESC
"""
)
+
+# data Modification Methods
+
+ def upsert_project(self, name, username, user_ip, **additional_columns):
+ # NOTE: pypi behaviour is to assign the first submitter of a
+ # project the "owner" role. this code does not
+ # perform that behaviour (implement in the view instead)
+ db.validate_argument_column_mapping(additional_columns, packages)
+
+ existing_project = self.get_project(name)
+
+ if existing_project:
+ message = "updating project {0}".format(existing_project['name'])
+ query = (packages.update()
+ .where(packages.c.name == existing_project['name']))
+ else:
+ message = "create"
+ query = packages.insert()
+
+ self.engine.execute(query.values(
+ name=name,
+ normalized_name=utils.normalize_project_name(name),
+ **additional_columns
+ ))
+
+ self._insert_journal_entry(name, None, message, username, user_ip)
+
+ def delete_project(self, name):
+ for release in self.get_releases(name):
+ self.delete_release(name, release['version'])
+ self.engine.execute("DELETE FROM packages WHERE name = %(name)s",
+ name=name)
+
+ def upsert_release(self, project_name, version, username, user_ip,
+ classifiers=None, release_dependencies=None,
+ description=None, **additional_db_values):
+ """
+ Takes in the following:
+
+ * project_name: the name of the package to insert
+ * version: the version of the package to insert
+ * username: username of the user upserting the package
+ * user_ip: ip address of the user upserting the package
+ * classifiers: a list of the classifiers to classify the release with
+ * release_dependencies: a dictionary of
+ 'ReleaseDependencyKind.value: [specifier]' pairs.
+ * description: a restructured text description of the release/project
+ * additional_db_values: any other column in the release table,
+ as specified by get_settable_release_columns
+
+ and inserts the release (if one doesn't exist), or updates otherwise
+ """
+ is_update = self.get_release(project_name, version) is not None
+ modified_elements = list(additional_db_values.keys())
+
+ db.validate_argument_column_mapping(
+ additional_db_values,
+ releases,
+ blacklist=['name', 'version', 'description', 'description_html',
+ '_pypi_ordering', '_pypi_hidden']
+ )
+
+ if not is_update:
+ additional_db_values['name'] = project_name
+ additional_db_values['version'] = version
+
+ if description:
+ modified_elements += ['description', 'description_html']
+ additional_db_values['description'] = description
+ additional_db_values['description_html'] = \
+ readme.rst.render(description)[0]
+
+ if len(additional_db_values) > 0:
+ if is_update:
+ self.engine.execute(
+ releases
+ .update()
+ .where(releases.columns.name == project_name)
+ .where(releases.columns.version == version)
+ .values(**additional_db_values)
+ )
+ else:
+ self.engine.execute(
+ releases.insert().values(**additional_db_values)
+ )
+
+ # external tables
+
+ # this is legacy behavior. According to PEP-438, we should
+ # no longer support parsing urls from descriptions
+ hosting_mode = self.get_hosting_mode(project_name)
+ if hosting_mode in ('pypi-scrape-crawl', 'pypi-scrape') \
+ and 'description_html' in additional_db_values:
+
+ self.update_release_external_urls(
+ project_name, version,
+ utils.find_links_from_html(
+ additional_db_values['description_html']
+ )
+ )
+
+ if classifiers:
+ modified_elements.append('classifiers')
+ self.update_release_classifiers(project_name, version,
+ classifiers)
+
+ if release_dependencies:
+ self.update_release_dependencies(project_name, version,
+ release_dependencies)
+
+ if is_update:
+ journal_message = 'update {0}'.format(','.join(modified_elements))
+ else:
+ journal_message = "new release"
+
+ self._insert_journal_entry(project_name, version, journal_message,
+ username, user_ip)
+
+ # insert specific actions
+
+ if not is_update:
+ self._update_release_ordering(project_name)
+
+ def delete_release(self, project_name, version):
+ # delete FK rows first
+ for kind in ReleaseDependencyKind:
+ self._delete_release_dependencies_of_kind(project_name,
+ version, kind.value)
+
+ self._delete_release_classifiers(project_name, version)
+ self._delete_release_external_urls(project_name, version)
+
+ # actual deletion from the release table
+ delete_statement = \
+ """
+ DELETE FROM releases
+ WHERE name = %(name)s
+ AND version = %(version)s
+ """
+ self.engine.execute(
+ delete_statement,
+ name=project_name,
+ version=version
+ )
+
+ def _update_release_ordering(self, project_name):
+ query = \
+ """
+ SELECT version, _pypi_ordering
+ FROM releases
+ WHERE name = %(name)s
+ """
+ project_versions = [project for project in
+ self.engine.execute(query, name=project_name)]
+ sorted_versions = sorted(
+ project_versions,
+ key=(lambda x: pkg_resources.parse_version(x[0]))
+ )
+ query = \
+ """
+ UPDATE releases
+ SET _pypi_ordering = %(order)s
+ WHERE name = %(name)s
+ AND version = %(version)s
+ """
+ for order, (version, current) in enumerate(sorted_versions):
+ if current != order:
+ self.engine.execute(query, name=project_name,
+ order=order, version=version)
+
+ def update_release_classifiers(self, name, version, classifiers):
+ self._delete_release_classifiers(name, version)
+ insert_query = \
+ """ INSERT INTO release_classifiers
+ (name, version, trove_id)
+ VALUES
+ (%(name)s, %(version)s, %(trove_id)s)
+ """
+ classifier_id_dict = self.get_classifier_ids(classifiers)
+ for classifier in classifiers:
+ trove_id = classifier_id_dict[classifier]
+ self.engine.execute(insert_query, name=name,
+ version=version, trove_id=trove_id)
+
+ def _delete_release_classifiers(self, name, version):
+ query = \
+ """ DELETE FROM release_classifiers
+ WHERE name = %(name)s
+ AND version = %(version)s
+ """
+ self.engine.execute(
+ query,
+ name=name,
+ version=version
+ )
+
+ def update_release_external_urls(self, project_name, version, urls):
+ self._delete_release_external_urls(project_name, version)
+ insert_query = \
+ """ INSERT INTO description_urls
+ (name, version, url)
+ VALUES
+ (%(name)s, %(version)s, %(url)s)
+ """
+ for url in urls:
+ self.engine.execute(insert_query, name=project_name,
+ version=version, url=url)
+
+ def _delete_release_external_urls(self, project_name, version):
+ query = \
+ """ DELETE FROM description_urls
+ WHERE name = %(name)s
+ AND version = %(version)s
+ """
+ self.engine.execute(
+ query,
+ name=project_name,
+ version=version
+ )
+
+ def update_release_dependencies(self, project_name, version,
+ specifier_dict):
+ """
+ Takes in a project_name, version, and a release_dict of the format:
+ { ReleaseDependencyKind: [specifier_name_foo, specifier_name_bar] }
+
+ and updates the release dependencies with the desired table
+ """
+ insert_query = \
+ """
+ INSERT INTO release_dependencies
+ (name, version, kind, specifier)
+ VALUES
+ (%(name)s, %(version)s, %(kind)s, %(specifier)s)
+ """
+
+ old_specifier = self.get_release_dependencies(project_name,
+ version)
+ for kind, specifiers in specifier_dict.items():
+
+ # no need to update if the state is already there
+ if kind in old_specifier and specifiers == old_specifier[kind]:
+ continue
+
+ self._delete_release_dependencies_of_kind(project_name, version,
+ kind)
+ for specifier in specifiers:
+ self.engine.execute(
+ insert_query,
+ name=project_name,
+ version=version,
+ kind=kind,
+ specifier=specifier
+ )
+
+ def _delete_release_dependencies_of_kind(self, project_name, version,
+ kind):
+ query = \
+ """
+ DELETE FROM release_dependencies
+ WHERE name = %(name)s
+ AND version = %(version)s
+ AND kind = %(kind)s
+ """
+ self.engine.execute(
+ query,
+ name=project_name,
+ version=version,
+ kind=kind
+ )
+
+ def _insert_journal_entry(self, project_name, version, message,
+ username, userip):
+ date = datetime.datetime.now()
+ query = \
+ """
+ INSERT INTO journals
+ (name, version, action, submitted_date,
+ submitted_by, submitted_from)
+ VALUES
+ (%(name)s, %(version)s, %(action)s, %(submitted_date)s,
+ %(submitted_by)s, %(submitted_from)s)
+ """
+ self.engine.execute(
+ query,
+ name=project_name,
+ version=version,
+ action=message,
+ submitted_date=date.strftime('%Y-%m-%d %H:%M:%S'),
+ submitted_by=username,
+ submitted_from=userip
+ )
diff --git a/warehouse/packaging/tables.py b/warehouse/packaging/tables.py
--- a/warehouse/packaging/tables.py
+++ b/warehouse/packaging/tables.py
@@ -35,7 +35,7 @@ class ReleaseDependencyKind(int, enum.Enum):
obsoletes_dist = 6 # Unused
requires_external = 7 # Unused
- # WHY
+ # WHY +1
project_url = 8
diff --git a/warehouse/utils.py b/warehouse/utils.py
--- a/warehouse/utils.py
+++ b/warehouse/utils.py
@@ -130,8 +130,23 @@ def redirect_next(request, default="/", field_name="next", code=303):
return redirect(next, code=code)
-def normalize(value):
- return re.sub("_", "-", value, re.I).lower()
+PACKAGE_REGEX = {
+ "permitted_characters": re.compile("^[a-zA-Z0-9_\-.]+$"),
+ "start_and_end_with_ascii": re.compile("^[a-zA-Z0-9].*[a-zA-Z0-9]$"),
+}
+
+
+def normalize_project_name(name):
+ """
+ Normalizes a package name as per PEP-426
+ """
+ name = re.sub("_", "-", name).lower()
+ if not PACKAGE_REGEX["permitted_characters"].match(name):
+ raise ValueError("name contains illegal characters! (See PEP-426)")
+ if not PACKAGE_REGEX["start_and_end_with_ascii"].match(name):
+ raise ValueError("Distribution names MUST start and end with " +
+ "an ASCII letter or digit (See PEP-426)")
+ return name
class SearchPagination(object):
@@ -282,3 +297,14 @@ def is_safe_url(url, host):
return ((not parsed.netloc or parsed.netloc == host) and
(not parsed.scheme or parsed.scheme in ["http", "https"]))
+
+
+def find_links_from_html(html_body):
+ """
+ Return a list of links, extracted from all <a href="{{ url
+ }}">...</a> elements found.
+ """
+ document = html5lib.parse(html_body)
+ return [a.attrib.get('href', None)
+ for a in document.iter("{http://www.w3.org/1999/xhtml}a")
+ if 'href' in a.attrib]
| diff --git a/tests/accounts/test_db.py b/tests/accounts/test_db.py
--- a/tests/accounts/test_db.py
+++ b/tests/accounts/test_db.py
@@ -20,25 +20,6 @@
from warehouse.accounts.tables import users, emails
-@pytest.fixture
-def user(request, dbapp):
-
- @request.addfinalizer
- def delete_user():
- dbapp.db.accounts.delete_user(username)
-
- username = "guidovanrossum"
- email = "notanemail@python.org"
- password = "plaintextpasswordsaregreat"
- dbapp.db.accounts.insert_user(
- username,
- email,
- password)
- return_value = dbapp.db.accounts.get_user(username)
- return_value['password'] = password
- return return_value
-
-
def test_get_user(dbapp):
dbapp.engine.execute(users.insert().values(
password="!",
diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -137,3 +137,43 @@ def connect():
engine=pretend.stub(connect=connect, execute=connect),
redis_class=ErrorRedis,
)
+
+
+@pytest.fixture
+def user(dbapp):
+
+ username = "guidovanrossum"
+ email = "notanemail@example.org"
+ password = "plaintextpasswordsaregreat"
+ dbapp.db.accounts.insert_user(
+ username,
+ email,
+ password)
+ return_value = dbapp.db.accounts.get_user(username)
+ return_value['password'] = password
+ return return_value
+
+
+@pytest.fixture
+def project(user, dbapp):
+ project_name = "fooproject"
+
+ dbapp.db.packaging.upsert_project(project_name, user['username'],
+ '0.0.0.0')
+ return {
+ "name": project_name
+ }
+
+
+@pytest.fixture
+def release(user, dbapp, project):
+ version = '1.0'
+
+ dbapp.db.packaging.upsert_release(
+ project['name'], version, user['username'], '0.0.0.0',
+ description="this is a dummy package"
+ )
+ return {
+ 'project': project,
+ 'version': version
+ }
diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -125,7 +125,7 @@ def test_daytime(monkeypatch):
('1.0', None),
])
def test_json(monkeypatch, version, callback):
- get_project = pretend.call_recorder(lambda n: 'spam')
+ get_project = pretend.call_recorder(lambda n: {'name': 'spam'})
get_project_versions = pretend.call_recorder(lambda n: ['2.0', '1.0'])
get_last_serial = pretend.call_recorder(lambda *n: 42)
app = pretend.stub(
@@ -191,7 +191,8 @@ def test_jsonp_invalid():
(pretend.stub(name="spam"), '1'),
])
def test_json_missing(monkeypatch, project, version):
- get_project = pretend.call_recorder(lambda n: project)
+ return_value = {'name': project} if project else None
+ get_project = pretend.call_recorder(lambda n: return_value)
get_project_versions = pretend.call_recorder(lambda n: [])
app = pretend.stub(
db=pretend.stub(
diff --git a/tests/legacy/test_simple.py b/tests/legacy/test_simple.py
--- a/tests/legacy/test_simple.py
+++ b/tests/legacy/test_simple.py
@@ -79,7 +79,9 @@ def test_project(app, monkeypatch,
app.db = pretend.stub(
packaging=pretend.stub(
- get_project=pretend.call_recorder(lambda p: project_name),
+ get_project=pretend.call_recorder(
+ lambda p: {'name': project_name},
+ ),
get_file_urls=pretend.call_recorder(lambda p: []),
get_hosting_mode=pretend.call_recorder(
lambda p: hosting_mode,
diff --git a/tests/packaging/test_db.py b/tests/packaging/test_db.py
--- a/tests/packaging/test_db.py
+++ b/tests/packaging/test_db.py
@@ -22,7 +22,7 @@
from warehouse.packaging.db import log
from warehouse.packaging.tables import (
packages, releases, release_files, description_urls, journals, classifiers,
- release_classifiers, release_dependencies, roles,
+ release_classifiers, release_dependencies, roles, ReleaseDependencyKind
)
@@ -502,7 +502,7 @@ def test_get_project(name, normalized, dbapp):
packages.insert().values(name=name, normalized_name=normalized)
)
- assert dbapp.db.packaging.get_project(normalized) == name
+ assert dbapp.db.packaging.get_project(normalized)['name'] == name
def test_get_project_missing(dbapp):
@@ -895,6 +895,10 @@ def test_get_project_versions(dbapp):
["4.0", "3.0", "2.0", "1.0"]
+def test_get_release_missing_project(dbapp):
+ assert not dbapp.db.packaging.get_release("foo", "1.2.3")
+
+
def test_get_release(dbapp):
created = datetime.datetime.utcnow()
@@ -1426,3 +1430,265 @@ def test_get_full_latest_releases(dbapp):
),
}
]
+
+
+def test_insert_delete_project(dbapp, user):
+ project_name = 'fooproject'
+ dbapp.db.packaging.upsert_project(project_name, user['username'],
+ '0.0.0.0')
+ assert dbapp.db.packaging.get_project(project_name)
+ dbapp.db.packaging.delete_project(project_name)
+ assert not dbapp.db.packaging.get_project(project_name)
+
+
+def test_upsert_project(dbapp, user, project):
+ bugtrack_url = "http://bugtrack.example.com"
+ dbapp.db.packaging.upsert_project(project['name'], user['username'],
+ '0.0.0.0',
+ bugtrack_url=bugtrack_url)
+ project = dbapp.db.packaging.get_project(project['name'])
+ assert project['bugtrack_url'] == bugtrack_url
+
+
+def test_upsert_project_bad_column(dbapp, user):
+ with pytest.raises(TypeError):
+ dbapp.db.packaging.upsert_project(
+ "badproject",
+ user['username'],
+ "0.0.0.0",
+ badcolumn="this is a bad column"
+ )
+
+
+def test_delete_project(dbapp, project, release):
+ dbapp.db.packaging.delete_project(
+ project['name']
+ )
+ assert not dbapp.db.packaging.get_project(project['name'])
+
+
+def test_upsert_release(dbapp, user, project):
+ version = '1.0'
+ dbapp.db.packaging.upsert_release(
+ project["name"], version, user["username"], "0.0.0.0"
+ )
+ assert dbapp.db.packaging.get_release(project['name'], version)
+
+
+def test_delete_release(dbapp, project, release):
+ dbapp.db.packaging.delete_release(
+ project['name'], release['version']
+ )
+ assert not dbapp.db.packaging.get_release(project['name'],
+ release['version'])
+
+
+def test_upsert_release_ordering(dbapp, user, project):
+ older_version = '1.0'
+ dbapp.db.packaging.upsert_release(
+ project["name"], older_version, user["username"], "0.0.0.0"
+ )
+
+ newer_version = '1.1'
+ dbapp.db.packaging.upsert_release(
+ project["name"], newer_version, user["username"], "0.0.0.0"
+ )
+
+ versions = dbapp.db.packaging.get_project_versions(
+ project['name']
+ )
+
+ assert versions == [newer_version, older_version]
+
+
+def test_upsert_release_full(dbapp, user, project):
+ """ test the updating of all of upsert """
+
+ # setup
+ dbapp.engine.execute(classifiers.insert().values(
+ id=1,
+ classifier="foo"
+ ))
+ release_dependencies = {
+ ReleaseDependencyKind.requires_dist.value: ('foo',)
+ }
+
+ # test
+ version = '1.0'
+ dbapp.db.packaging.upsert_release(
+ project['name'], version, user['username'], '0.0.0.0',
+ classifiers=('foo',),
+ release_dependencies=release_dependencies
+ )
+
+ assert dbapp.db.packaging.get_release(project['name'], version)
+
+ assert set(dbapp.db.packaging.get_classifiers(
+ project['name'],
+ version)
+ ) == set(('foo',))
+
+ assert set(dbapp.db.packaging.get_classifiers(
+ project['name'], version
+ )) == set(('foo',))
+
+
+def test_upsert_release_parse_description(dbapp, user, project):
+ dbapp.db.packaging.upsert_project(
+ project['name'],
+ user['username'],
+ '0.0.0.0',
+ hosting_mode="pypi-scrape-crawl"
+ )
+
+ example_url = "http://description.example.com"
+ dbapp.db.packaging.upsert_release(
+ project["name"], "1.0", user["username"], "0.0.0.0",
+ description="`example <{0}>`_".format(example_url)
+ )
+
+ assert set(dbapp.db.packaging.get_release_external_urls(
+ project['name'], "1.0"
+ )) == set((example_url,))
+
+
+def test_upsert_release_update(dbapp, user, release):
+ new_description = "this is an new dummy description"
+ dbapp.db.packaging.upsert_release(
+ release['project']['name'], release['version'],
+ user['username'], '0.0.0.0',
+ description=new_description
+ )
+ release_info = dbapp.db.packaging.get_release(
+ release['project']['name'], release['version']
+ )
+ assert release_info['description'] == new_description
+
+
+def test_upsert_release_update_release_dependencies(dbapp, user, release):
+ specifier_dict = {
+ ReleaseDependencyKind.requires_dist.value: set((
+ "foo",
+ )),
+ }
+
+ dbapp.db.packaging.upsert_release(
+ release['project']['name'], release['version'],
+ user['username'], '0.0.0.0',
+ release_dependencies=specifier_dict
+ )
+
+ assert dbapp.db.packaging.get_release_dependencies(
+ release['project']['name'], release['version']
+ ) == specifier_dict
+
+
+def test_upsert_bad_parameter(dbapp, user, project):
+ with pytest.raises(TypeError):
+ dbapp.db.packaging.upsert_release(
+ project['name'], '1.0', user['username'], '0.0.0.0',
+ badparam="imnotinreleasedb"
+ )
+
+
+def test_upsert_good_parameter(dbapp, user, project):
+ author = "imanauthor"
+ dbapp.db.packaging.upsert_release(
+ project['name'], '1.0', user['username'], '0.0.0.0',
+ author=author
+ )
+ assert dbapp.db.packaging.get_release(
+ project['name'], '1.0'
+ )['author'] == author
+
+
+def test_update_release_dependencies(dbapp, release):
+
+ specifier_dict = {
+ ReleaseDependencyKind.requires_dist.value: set((
+ "foo",
+ "bar"
+ )),
+ ReleaseDependencyKind.provides_dist.value: set((
+ "baz",
+ ))
+ }
+
+ p, v = release['project']['name'], release['version']
+
+ dbapp.db.packaging.update_release_dependencies(
+ p, v, specifier_dict
+ )
+
+ assert dbapp.db.packaging.get_release_dependencies(
+ p, v
+ ) == specifier_dict
+
+ specifier_dict = {
+ ReleaseDependencyKind.requires_dist.value: set((
+ "foo",
+ "bar"
+ )),
+ ReleaseDependencyKind.provides_dist.value: set((
+ "boobaz",
+ ))
+ }
+
+ dbapp.db.packaging.update_release_dependencies(
+ p, v, specifier_dict
+ )
+
+ assert dbapp.db.packaging.get_release_dependencies(
+ p, v
+ ) == specifier_dict
+
+
+def test_update_release_classifiers(dbapp, release):
+ dbapp.engine.execute(classifiers.insert().values(
+ id=1,
+ classifier="foo"
+ ))
+ dbapp.engine.execute(classifiers.insert().values(
+ id=2,
+ classifier="bar"
+ ))
+ dbapp.engine.execute(classifiers.insert().values(
+ id=3,
+ classifier="baz"
+ ))
+
+ dbapp.db.packaging.update_release_classifiers(
+ release['project']['name'],
+ release['version'],
+ set(('foo',))
+ )
+
+ assert set(dbapp.db.packaging.get_classifiers(
+ release['project']['name'],
+ release['version'])
+ ) == set(('foo',))
+
+ dbapp.db.packaging.update_release_classifiers(
+ release['project']['name'],
+ release['version'],
+ set(('bar', 'baz'))
+ )
+
+ assert set(dbapp.db.packaging.get_classifiers(
+ release['project']['name'],
+ release['version'])
+ ) == set(('bar', 'baz'))
+
+
+def test_update_external_urls(dbapp, release):
+ example_urls = ('http://example.com', 'http://example.com/test')
+ dbapp.db.packaging.update_release_external_urls(
+ release['project']['name'],
+ release['version'],
+ example_urls
+ )
+
+ assert set(dbapp.db.packaging.get_release_external_urls(
+ release['project']['name'],
+ release['version'])
+ ) == set(example_urls)
diff --git a/tests/test_db.py b/tests/test_db.py
--- a/tests/test_db.py
+++ b/tests/test_db.py
@@ -58,6 +58,27 @@ def test_scalar(value, default, expected, eargs, ekwargs):
assert result.scalar.calls == [pretend.call()]
+@pytest.mark.parametrize(
+ ("value", "default", "expected", "eargs", "ekwargs"), [
+ (None, {'foo': 'bar'}, {'foo': 'bar'}, [], {}),
+ ({'baz': 'bazbar'}, {'foo': 'bar'}, {'baz': 'bazbar'}, [], {})
+ ]
+)
+def test_first(value, default, expected, eargs, ekwargs):
+ result = pretend.stub(first=pretend.call_recorder(lambda: value))
+ execute = pretend.call_recorder(lambda q, *a, **kw: result)
+ model = pretend.stub(
+ engine=pretend.stub(execute=execute)
+ )
+
+ sql = db.first("SELECT * FROM thing LIMIT 1", default=default)
+
+ assert sql(model, *eargs, **ekwargs) == expected
+ assert execute.calls == [
+ pretend.call("SELECT * FROM thing LIMIT 1", *eargs, **ekwargs)
+ ]
+
+
@pytest.mark.parametrize(
("row_func", "value", "expected", "eargs", "ekwargs"),
[
@@ -135,3 +156,15 @@ def test_mapping(key_func, value_func, value, expected, eargs, ekwargs):
assert execute.calls == [
pretend.call("SELECT * FROM thing", *eargs, **ekwargs),
]
+
+
+def test_validate_argument_column_mapping():
+ table_stub = pretend.stub(
+ columns=(
+ pretend.stub(key="foo"),
+ pretend.stub(key="bar"),
+ ),
+ name="foobartable"
+ )
+ with pytest.raises(TypeError):
+ db.validate_argument_column_mapping("baz", table_stub)
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -21,6 +21,7 @@
merge_dict, cache, get_wsgi_application, get_mimetype, redirect,
SearchPagination, is_valid_json_callback_name, generate_camouflage_url,
camouflage_images, cors, redirect_next, vary_by, random_token, is_safe_url,
+ find_links_from_html, normalize_project_name
)
@@ -313,3 +314,28 @@ def test_random_token():
])
def test_is_safe_url(url, host, expected):
assert is_safe_url(url, host) is expected
+
+
+@pytest.mark.parametrize(("html", "expected"), [
+ ("<a href='foo'>footext</a><div><a href='bar'>bartext</a><div>",
+ ["foo", "bar"]),
+])
+def test_find_links_from_html(html, expected):
+ assert find_links_from_html(html) == expected
+
+
+@pytest.mark.parametrize(("input_string", "expected"), [
+ ("imabad-name^^^", ValueError),
+ ("CaseInsensitive", "caseinsensitive"),
+ ("replace_underscores", "replace-underscores"),
+ ("-not-alphanumericstart", ValueError),
+ ("not-alphanumericend-", ValueError),
+ ("123456789", "123456789"),
+ ("hoobs#", ValueError)
+])
+def test_normalize_project_name(input_string, expected):
+ if expected is ValueError:
+ with pytest.raises(ValueError):
+ normalize_project_name(input_string)
+ else:
+ assert normalize_project_name(input_string) == expected
| store_package migration to master
This is a work-in-progress pull request to move the package upload method to warehouse, similar to #293.
I'm sending a WIP so I can get some feedback before I continue further. Things left to do are:
- writing tests for the db methods
- re-organizing packaging/db.py to be a little bit clearer
- pep8 / flake8 cleanup
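As a reviewing aid, a sketch of how the new methods fit together end to end, reusing the `dbapp` and `user` fixtures this PR adds to tests/conftest.py (the test itself is illustrative, not part of the PR):

```
def test_upload_flow(dbapp, user):
    # Create the project, add a release, then clean both up again.
    dbapp.db.packaging.upsert_project("fooproject", user["username"],
                                      "0.0.0.0")
    dbapp.db.packaging.upsert_release(
        "fooproject", "1.0", user["username"], "0.0.0.0",
        description="this is a dummy package",
    )
    assert dbapp.db.packaging.get_release("fooproject", "1.0")

    dbapp.db.packaging.delete_project("fooproject")
    assert not dbapp.db.packaging.get_project("fooproject")
```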
| 2014-05-05T01:41:43Z | [] | [] |
|
pypi/warehouse | 320 | pypi__warehouse-320 | [
"319"
] | 4a4e4231e2ea5004c035de67986abd1bb9003d54 | diff --git a/warehouse/application.py b/warehouse/application.py
--- a/warehouse/application.py
+++ b/warehouse/application.py
@@ -48,6 +48,7 @@
from warehouse.csrf import handle_csrf
from warehouse.datastructures import AttributeDict
from warehouse.http import Request
+from warehouse.legacy.middlewares import LegacyRewriteMiddleware
from warehouse.middlewares import XForwardedTokenMiddleware
from warehouse.packaging import helpers as packaging_helpers
from warehouse.packaging.search import ProjectMapping
@@ -206,6 +207,12 @@ def __init__(self, config, engine=None, redis_class=redis.StrictRedis):
],
)
+ # Previously PyPI used a hand written disaptch method which depended
+ # on things like the request's content type or url parameters. In order
+ # to sanely support that in Warehouse we use this middleware to rewrite
+ # those to "internal" URLs which we can then dispatch based on.
+ self.wsgi_app = LegacyRewriteMiddleware(self.wsgi_app)
+
# This is last because we want it processed first in the stack of
# middlewares. This will ensure that we strip X-Forwarded-* headers
# if the request doesn't come from Fastly
diff --git a/warehouse/csrf.py b/warehouse/csrf.py
--- a/warehouse/csrf.py
+++ b/warehouse/csrf.py
@@ -111,7 +111,7 @@ def wrapped(self, view, app, request, *args, **kwargs):
raise SecurityError("No CSRF protection applied to view")
elif getattr(view, "_csrf", None):
# The function has explicitly opted in to the CSRF protection
- # and we ca assume that it has handled setting up the CSRF
+ # and we can assume that it has handled setting up the CSRF
# token as well as making sure that a Vary: Cookie header has
# been added.
_verify_origin(request)
diff --git a/warehouse/legacy/middlewares.py b/warehouse/legacy/middlewares.py
new file mode 100644
--- /dev/null
+++ b/warehouse/legacy/middlewares.py
@@ -0,0 +1,38 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from warehouse.http import Request
+
+
+class LegacyRewriteMiddleware:
+ """
+ This middleware handles rewriting the legacy URLs and requests in order to
+ make it possible to dispatch them to different functions using the standard
+ Werkzeug dispatcher.
+ """
+
+ def __init__(self, app):
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ request = Request(environ, populate_request=False, shallow=True)
+
+ # Our Legacy URLs are *always* under /pypi
+ if request.path[1:].split("/")[0] == "pypi":
+ # if the MIME type of the request is XML then we rewrite to our
+ # XMLRPC URL
+ if request.headers.get('Content-Type') == 'text/xml':
+ environ["PATH_INFO"] = "/_legacy/xmlrpc/"
+
+ return self.app(environ, start_response)
diff --git a/warehouse/legacy/pypi.py b/warehouse/legacy/pypi.py
--- a/warehouse/legacy/pypi.py
+++ b/warehouse/legacy/pypi.py
@@ -48,17 +48,13 @@ def deco(fn):
def pypi(app, request):
- # if the MIME type of the request is XML then we go into XML-RPC mode
- if request.headers.get('Content-Type') == 'text/xml':
- return xmlrpc.handle_request(app, request)
-
# check for the legacy :action-style dispatch
action = request.args.get(':action')
if action in _action_methods:
return _action_methods[action](app, request)
- # no XML-RPC and no :action means we render the index, or at least we
- # redirect to where it moved to
+ # No :action means we render the index, or at least we redirect to where it
+ # moved to
return redirect(
url_for(
request,
diff --git a/warehouse/legacy/urls.py b/warehouse/legacy/urls.py
--- a/warehouse/legacy/urls.py
+++ b/warehouse/legacy/urls.py
@@ -30,4 +30,7 @@
endpoint="project_json"),
Rule("/daytime", methods=["GET"], endpoint="daytime"),
]),
+ EndpointPrefix("warehouse.legacy.xmlrpc.", [
+ Rule("/_legacy/xmlrpc/", methods=["POST"], endpoint="handler"),
+ ]),
]
diff --git a/warehouse/legacy/xmlrpc.py b/warehouse/legacy/xmlrpc.py
--- a/warehouse/legacy/xmlrpc.py
+++ b/warehouse/legacy/xmlrpc.py
@@ -18,10 +18,12 @@
from werkzeug.exceptions import BadRequest
+from warehouse.csrf import csrf_exempt
from warehouse.http import Response
-def handle_request(app, request):
+@csrf_exempt
+def handler(app, request):
'''Wrap an invocation of the XML-RPC dispatcher.
'''
# unicode strings will be encoded in utf-8 by xmlrpclib
| diff --git a/tests/legacy/test_middlewares.py b/tests/legacy/test_middlewares.py
new file mode 100644
--- /dev/null
+++ b/tests/legacy/test_middlewares.py
@@ -0,0 +1,49 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse.legacy.middlewares import LegacyRewriteMiddleware
+
+
+def test_no_modification():
+ app = pretend.call_recorder(lambda environ, start_response: None)
+ LegacyRewriteMiddleware(app)({"PATH_INFO": "/foo/bar"}, None)
+
+ assert app.calls == [pretend.call({"PATH_INFO": "/foo/bar"}, None)]
+
+
+def test_pypi_passes_through():
+ app = pretend.call_recorder(lambda environ, start_response: None)
+ LegacyRewriteMiddleware(app)({"PATH_INFO": "/pypi"}, None)
+
+ assert app.calls == [pretend.call({"PATH_INFO": "/pypi"}, None)]
+
+
+def test_pypi_dispatches_xmlrpc():
+ app = pretend.call_recorder(lambda environ, start_response: None)
+ LegacyRewriteMiddleware(app)(
+ {
+ "PATH_INFO": "/pypi",
+ "CONTENT_TYPE": "text/xml",
+ },
+ None,
+ )
+
+ assert app.calls == [
+ pretend.call(
+ {"PATH_INFO": "/_legacy/xmlrpc/", "CONTENT_TYPE": "text/xml"},
+ None,
+ ),
+ ]
diff --git a/tests/legacy/test_pypi.py b/tests/legacy/test_pypi.py
--- a/tests/legacy/test_pypi.py
+++ b/tests/legacy/test_pypi.py
@@ -86,27 +86,6 @@ def test_pypi_route_action_double(monkeypatch):
pypi.register('test')
-def test_pypi_route_xmlrpc(monkeypatch):
- app = pretend.stub()
- request = pretend.stub(
- args={},
- headers={'Content-Type': 'text/xml'},
- )
-
- xmlrpc_stub = pretend.stub(
- handle_request=pretend.call_recorder(lambda *a: 'success')
- )
- monkeypatch.setattr(pypi, 'xmlrpc', xmlrpc_stub)
-
- # request for /pypi with no additional request information redirects
- # to site root
- #
- resp = pypi.pypi(app, request)
-
- assert xmlrpc_stub.handle_request.calls == [pretend.call(app, request)]
- assert resp == 'success'
-
-
def test_daytime(monkeypatch):
app = pretend.stub()
request = pretend.stub()
diff --git a/tests/legacy/test_xmlrpc.py b/tests/legacy/test_xmlrpc.py
--- a/tests/legacy/test_xmlrpc.py
+++ b/tests/legacy/test_xmlrpc.py
@@ -50,7 +50,7 @@ def test_xmlrpc_handler(monkeypatch):
get_data=lambda **k: xml_request,
)
- assert xmlrpc.handle_request(app, request) == 'response'
+ assert xmlrpc.handler(app, request) == 'response'
assert interface.list_packages.calls == [pretend.call()]
@@ -82,7 +82,7 @@ def test_xmlrpc_handler_size_limit(monkeypatch):
)
with pytest.raises(BadRequest):
- xmlrpc.handle_request(app, request)
+ xmlrpc.handler(app, request)
def test_xmlrpc_list_packages():
| XMLRPC API fails with 400 error
Here's the shell session:
```
>>> from xmlrpc.client import ServerProxy
>>> c = ServerProxy('https://warehouse.python.org/pypi')
>>> c.list_packages()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/Cellar/python3/3.4.0_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/xmlrpc/client.py", line 1090, in __call__
return self.__send(self.__name, args)
File "/usr/local/Cellar/python3/3.4.0_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/xmlrpc/client.py", line 1420, in __request
verbose=self.__verbose
File "/usr/local/Cellar/python3/3.4.0_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/xmlrpc/client.py", line 1132, in request
return self.single_request(host, handler, request_body, verbose)
File "/usr/local/Cellar/python3/3.4.0_1/Frameworks/Python.framework/Versions/3.4/lib/python3.4/xmlrpc/client.py", line 1165, in single_request
dict(resp.getheaders())
xmlrpc.client.ProtocolError: <ProtocolError for warehouse.python.org/pypi: 400 BAD REQUEST>
>>> c = ServerProxy('https://pypi.python.org/pypi')
>>> c.list_packages()
[lots of packages]
>>>
```
| I can reproduce this.
```
the:~ edurbin$ curl -X POST -H "Content-Type: text/xml" https://warehouse.python.org/pypi -d "<?xml version='1.0'?><methodCall><methodName>list_packages</methodName><params></params></methodCall>"
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<title>400 Bad Request</title>
<h1>Bad Request</h1>
<p>No CSRF protection applied to view</p>
```
Whoops, we'll need to exempt that from CSRF.
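That is what the warehouse/legacy/xmlrpc.py hunk above ends up doing; roughly:

```
# warehouse/legacy/xmlrpc.py (see the patch in this PR)
from warehouse.csrf import csrf_exempt


@csrf_exempt
def handler(app, request):
    # XML-RPC clients never send a CSRF token, so the view opts out of
    # the CSRF check explicitly instead of failing with 400.
    ...
```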
| 2014-05-13T23:44:08Z | [] | [] |
pypi/warehouse | 321 | pypi__warehouse-321 | [
"318"
] | 073cb4d08edbd922adcc9e84b377cd7f5ffa8db8 | diff --git a/warehouse/utils.py b/warehouse/utils.py
--- a/warehouse/utils.py
+++ b/warehouse/utils.py
@@ -132,7 +132,8 @@ def redirect_next(request, default="/", field_name="next", code=303):
PACKAGE_REGEX = {
"permitted_characters": re.compile("^[a-zA-Z0-9_\-.]+$"),
- "start_and_end_with_ascii": re.compile("^[a-zA-Z0-9].*[a-zA-Z0-9]$"),
+ "start_with_alphanumeric": re.compile("^[a-zA-Z0-9].*"),
+ "end_with_alphanumeric": re.compile(".*[a-zA-Z0-9]$")
}
@@ -143,9 +144,12 @@ def normalize_project_name(name):
name = re.sub("_", "-", name).lower()
if not PACKAGE_REGEX["permitted_characters"].match(name):
raise ValueError("name contains illegal characters! (See PEP-426)")
- if not PACKAGE_REGEX["start_and_end_with_ascii"].match(name):
- raise ValueError("Distribution names MUST start and end with " +
- "an ASCII letter or digit (See PEP-426)")
+ if not (PACKAGE_REGEX["start_with_alphanumeric"].match(name) and
+ PACKAGE_REGEX["end_with_alphanumeric"].match(name)):
+ raise ValueError(
+ "Distribution names MUST start with and end with " +
+ "an ASCII letter or digit (See PEP-426)"
+ )
return name
| diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -331,7 +331,8 @@ def test_find_links_from_html(html, expected):
("-not-alphanumericstart", ValueError),
("not-alphanumericend-", ValueError),
("123456789", "123456789"),
- ("hoobs#", ValueError)
+ ("hoobs#", ValueError),
+ ("q", "q")
])
def test_normalize_project_name(input_string, expected):
if expected is ValueError:
| normalize_project_name doesn't appear to work with single character names
When accessing the URL https://warehouse.python.org/project/q/, an Internal Server Error is received. The stack trace is:
```
Stacktrace (most recent call last):
File "raven/middleware.py", line 35, in __call__
iterable = self.application(environ, start_response)
File "site-packages/guard.py", line 62, in __call__
return self.application(environ, _start_response)
File "werkzeug/wsgi.py", line 40, in <lambda>
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f)
File "warehouse/application.py", line 330, in wsgi_app
return self.dispatch_view(view, self, request, **kwargs)
File "warehouse/sessions.py", line 143, in wrapped
resp = fn(self, view, app, request, *args, **kwargs)
File "warehouse/csrf.py", line 131, in wrapped
return fn(self, view, app, request, *args, **kwargs)
File "warehouse/application.py", line 286, in dispatch_view
return view(*args, **kwargs)
File "warehouse/utils.py", line 61, in wrapper
resp = fn(app, request, *args, **kwargs)
File "warehouse/fastly.py", line 50, in wrapped
self.format_keys(**ctx)
File "warehouse/fastly.py", line 65, in format_keys
for key in self.keys
File "warehouse/fastly.py", line 65, in <listcomp>
for key in self.keys
File "python3.4/string.py", line 164, in format
return self.vformat(format_string, args, kwargs)
File "python3.4/string.py", line 168, in vformat
result = self._vformat(format_string, args, kwargs, used_args, 2)
File "python3.4/string.py", line 194, in _vformat
obj = self.convert_field(obj, conversion)
File "warehouse/fastly.py", line 24, in convert_field
return normalize_project_name(value)
File "warehouse/utils.py", line 148, in normalize_project_name
"an ASCII letter or digit (See PEP-426)")
```
| The Sentry error for this is https://app.getsentry.com/pypi/warehouse/group/19173368/
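The failure is easy to see with the patterns themselves (the first is the pre-patch regex from warehouse/utils.py, the other two are the split patterns the fix introduces):

```
import re

old = re.compile(r"^[a-zA-Z0-9].*[a-zA-Z0-9]$")
start = re.compile(r"^[a-zA-Z0-9].*")
end = re.compile(r".*[a-zA-Z0-9]$")

print(bool(old.match("q")))                        # False: needs >= 2 chars
print(bool(start.match("q") and end.match("q")))   # True
```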
| 2014-05-17T09:00:07Z | [] | [] |
pypi/warehouse | 430 | pypi__warehouse-430 | [
"387"
] | 40a3eadfa2f57e06872e0994cee35e2f6b069672 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -57,6 +57,7 @@
"warehouse.cli.db",
"warehouse.i18n",
"warehouse.legacy",
+ "warehouse.legacy.api",
"warehouse.migrations",
"warehouse.packaging",
"warehouse.utils",
diff --git a/warehouse/legacy/api/__init__.py b/warehouse/legacy/api/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/legacy/api/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/warehouse/legacy/api/simple.py b/warehouse/legacy/api/simple.py
new file mode 100644
--- /dev/null
+++ b/warehouse/legacy/api/simple.py
@@ -0,0 +1,82 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyramid.httpexceptions import HTTPMovedPermanently
+from pyramid.view import view_config
+from sqlalchemy import func
+
+from warehouse.cache.http import cache_control
+from warehouse.cache.origin import origin_cache
+from warehouse.packaging.models import JournalEntry, File, Project, Release
+
+
+@view_config(
+ route_name="legacy.api.simple.index",
+ renderer="legacy/api/simple/index.html",
+ decorator=[
+ cache_control(10 * 60), # 10 minutes
+ origin_cache(7 * 24 * 60 * 60), # 7 days
+ ],
+)
+def simple_index(request):
+ # Get the latest serial number
+ serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0
+ request.response.headers["X-PyPI-Last-Serial"] = serial
+
+ # Fetch the name and normalized name for all of our projects
+ projects = (
+ request.db.query(Project.name, Project.normalized_name)
+ .order_by(Project.normalized_name)
+ .all()
+ )
+
+ return {"projects": projects}
+
+
+@view_config(
+ route_name="legacy.api.simple.detail",
+ renderer="legacy/api/simple/detail.html",
+ decorator=[
+ cache_control(10 * 60), # 10 minutes
+ origin_cache(7 * 24 * 60 * 60), # 7 days
+ ],
+)
+def simple_detail(project, request):
+ # TODO: Handle files which are not hosted on PyPI
+
+ # Make sure that we're using the normalized version of the URL.
+ if (project.normalized_name !=
+ request.matchdict.get("name", project.normalized_name)):
+ return HTTPMovedPermanently(
+ request.current_route_url(name=project.normalized_name),
+ )
+
+ # Get the latest serial number for this project.
+ serial = (
+ request.db.query(func.max(JournalEntry.id))
+ .filter(JournalEntry.name == project.name)
+ .scalar()
+ )
+ request.response.headers["X-PyPI-Last-Serial"] = serial or 0
+
+ # Get all of the files for this project.
+ files = (
+ request.db.query(File)
+ .filter(
+ File.name == project.name,
+ File.version.in_(project.releases.with_entities(Release.version))
+ )
+ .order_by(File.filename)
+ .all()
+ )
+
+ return {"project": project, "files": files}
diff --git a/warehouse/legacy/tables.py b/warehouse/legacy/tables.py
--- a/warehouse/legacy/tables.py
+++ b/warehouse/legacy/tables.py
@@ -229,58 +229,6 @@
)
-journals = Table(
- "journals",
- db.metadata,
-
- Column("id", Integer(), primary_key=True, nullable=False),
- Column("name", Text()),
- Column("version", Text()),
- Column("action", Text()),
- Column("submitted_date", DateTime(timezone=False)),
- Column(
- "submitted_by",
- CIText(),
- ForeignKey(
- "accounts_user.username",
- onupdate="CASCADE",
- ),
- ),
- Column("submitted_from", Text()),
-)
-
-
-Index(
- "journals_changelog",
-
- journals.c.submitted_date,
- journals.c.name,
- journals.c.version,
- journals.c.action,
-)
-
-
-Index("journals_id_idx", journals.c.id)
-
-
-Index(
- "journals_latest_releases",
-
- journals.c.submitted_date,
- journals.c.name,
- journals.c.version,
- postgresql_where=(
- (journals.c.version != None) & (journals.c.action == "new release") # noqa
- ),
-)
-
-
-Index("journals_name_idx", journals.c.name)
-
-
-Index("journals_version_idx", journals.c.version)
-
-
mirrors = Table(
"mirrors",
db.metadata,
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -278,3 +278,41 @@ def size(self):
Index("rel_class_trove_id_idx", "trove_id"),
Index("rel_class_version_id_idx", "version"),
)
+
+
+class JournalEntry(db.ModelBase):
+
+ __tablename__ = "journals"
+
+ @declared_attr
+ def __table_args__(cls): # noqa
+ return (
+ Index(
+ "journals_changelog",
+ "submitted_date", "name", "version", "action",
+ ),
+ Index("journals_id_idx", "id"),
+ Index("journals_name_idx", "name"),
+ Index("journals_version_idx", "version"),
+ Index(
+ "journals_latest_releases",
+ "submitted_date", "name", "version",
+ postgresql_where=(
+ (cls.version != None) & (cls.action == "new release") # noqa
+ ),
+ ),
+ )
+
+ id = Column(Integer, primary_key=True, nullable=False)
+ name = Column(Text)
+ version = Column(Text)
+ action = Column(Text)
+ submitted_date = Column(DateTime(timezone=False))
+ submitted_by = Column(
+ CIText,
+ ForeignKey(
+ "accounts_user.username",
+ onupdate="CASCADE",
+ ),
+ )
+ submitted_from = Column(Text)
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -36,3 +36,12 @@ def includeme(config):
traverse="/{name}/{version}",
)
config.add_route("packaging.file", "/packages/{path:.*}")
+
+ # Legacy URLs
+ config.add_route("legacy.api.simple.index", "/simple/")
+ config.add_route(
+ "legacy.api.simple.detail",
+ "/simple/{name}/",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}/",
+ )
| diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py
--- a/tests/common/db/packaging.py
+++ b/tests/common/db/packaging.py
@@ -17,7 +17,9 @@
import factory
import factory.fuzzy
-from warehouse.packaging.models import Project, Release, Role, File
+from warehouse.packaging.models import (
+ Project, Release, Role, File, JournalEntry,
+)
from .accounts import UserFactory
from .base import WarehouseFactory
@@ -62,3 +64,16 @@ class Meta:
role_name = "Owner"
user = factory.SubFactory(UserFactory)
project = factory.SubFactory(ProjectFactory)
+
+
+class JournalEntryFactory(WarehouseFactory):
+ class Meta:
+ model = JournalEntry
+
+ id = factory.fuzzy.FuzzyInteger(low=1)
+ name = factory.fuzzy.FuzzyText(length=12)
+ version = factory.Sequence(lambda n: str(n) + ".0")
+ submitted_date = factory.fuzzy.FuzzyNaiveDateTime(
+ datetime.datetime(2008, 1, 1)
+ )
+ submitted_by = factory.SubFactory(UserFactory)
diff --git a/tests/legacy/__init__.py b/tests/legacy/__init__.py
new file mode 100644
--- /dev/null
+++ b/tests/legacy/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/legacy/api/__init__.py b/tests/legacy/api/__init__.py
new file mode 100644
--- /dev/null
+++ b/tests/legacy/api/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/legacy/api/test_simple.py b/tests/legacy/api/test_simple.py
new file mode 100644
--- /dev/null
+++ b/tests/legacy/api/test_simple.py
@@ -0,0 +1,167 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from pyramid.httpexceptions import HTTPMovedPermanently
+
+from warehouse.legacy.api import simple
+
+from ...common.db.accounts import UserFactory
+from ...common.db.packaging import (
+ ProjectFactory, ReleaseFactory, FileFactory, JournalEntryFactory,
+)
+
+
+class TestSimpleIndex:
+
+ def test_no_results_no_serial(self, db_request):
+ assert simple.simple_index(db_request) == {"projects": []}
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == 0
+
+ def test_no_results_with_serial(self, db_request):
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db, submitted_by=user.username,
+ )
+ assert simple.simple_index(db_request) == {"projects": []}
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == je.id
+
+ def test_with_results_no_serial(self, db_request):
+ projects = [
+ (x.name, x.normalized_name)
+ for x in
+ [ProjectFactory.create(session=db_request.db) for _ in range(3)]
+ ]
+ assert simple.simple_index(db_request) == {
+ "projects": sorted(projects, key=lambda x: x[1]),
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == 0
+
+ def test_with_results_with_serial(self, db_request):
+ projects = [
+ (x.name, x.normalized_name)
+ for x in
+ [ProjectFactory.create(session=db_request.db) for _ in range(3)]
+ ]
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db, submitted_by=user.username,
+ )
+
+ assert simple.simple_index(db_request) == {
+ "projects": sorted(projects, key=lambda x: x[1]),
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == je.id
+
+
+class TestSimpleDetail:
+
+ def test_redirects(self, pyramid_request):
+ project = pretend.stub(normalized_name="foo")
+
+ pyramid_request.matchdict["name"] = "Foo"
+ pyramid_request.current_route_url = pretend.call_recorder(
+ lambda name: "/foobar/"
+ )
+
+ resp = simple.simple_detail(project, pyramid_request)
+
+ assert isinstance(resp, HTTPMovedPermanently)
+ assert resp.headers["Location"] == "/foobar/"
+ assert pyramid_request.current_route_url.calls == [
+ pretend.call(name="foo"),
+ ]
+
+ def test_no_files_no_serial(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ db_request.matchdict["name"] = project.normalized_name
+ user = UserFactory.create(session=db_request.db)
+ JournalEntryFactory.create(
+ session=db_request.db, submitted_by=user.username,
+ )
+
+ assert simple.simple_detail(project, db_request) == {
+ "project": project,
+ "files": [],
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == 0
+
+ def test_no_files_with_serial(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ db_request.matchdict["name"] = project.normalized_name
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db,
+ name=project.name,
+ submitted_by=user.username,
+ )
+
+ assert simple.simple_detail(project, db_request) == {
+ "project": project,
+ "files": [],
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == je.id
+
+ def test_with_files_no_serial(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ releases = [
+ ReleaseFactory.create(session=db_request.db, project=project)
+ for _ in range(3)
+ ]
+ files = [
+ FileFactory.create(
+ session=db_request.db,
+ release=r,
+ filename="{}-{}.tar.gz".format(project.name, r.version),
+ )
+ for r in releases
+ ]
+ db_request.matchdict["name"] = project.normalized_name
+ user = UserFactory.create(session=db_request.db)
+ JournalEntryFactory.create(
+ session=db_request.db, submitted_by=user.username,
+ )
+
+ assert simple.simple_detail(project, db_request) == {
+ "project": project,
+ "files": files,
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == 0
+
+ def test_with_files_with_serial(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ releases = [
+ ReleaseFactory.create(session=db_request.db, project=project)
+ for _ in range(3)
+ ]
+ files = [
+ FileFactory.create(
+ session=db_request.db,
+ release=r,
+ filename="{}-{}.tar.gz".format(project.name, r.version),
+ )
+ for r in releases
+ ]
+ db_request.matchdict["name"] = project.normalized_name
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db,
+ name=project.name,
+ submitted_by=user.username,
+ )
+
+ assert simple.simple_detail(project, db_request) == {
+ "project": project,
+ "files": files,
+ }
+ assert db_request.response.headers["X-PyPI-Last-Serial"] == je.id
diff --git a/tests/test_routes.py b/tests/test_routes.py
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -47,4 +47,11 @@ def add_route(*args, **kwargs):
traverse="/{name}/{version}",
),
pretend.call("packaging.file", "/packages/{path:.*}"),
+ pretend.call("legacy.api.simple.index", "/simple/"),
+ pretend.call(
+ "legacy.api.simple.detail",
+ "/simple/{name}/",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}/",
+ ),
]
| Implement the Simple API
We need to implement `/simple/` and `/simple/{name}/`.
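For context, a minimal sketch of how a mirroring client might consume these endpoints; the host name is hypothetical, but the `X-PyPI-Last-Serial` header and the anchor-per-project HTML body match what the views in the patch emit.

```python
import urllib.request

# Hypothetical host; the paths match the routes registered in the patch.
with urllib.request.urlopen("https://warehouse.example/simple/") as resp:
    last_serial = int(resp.headers.get("X-PyPI-Last-Serial", 0))
    index_html = resp.read().decode("utf-8")  # one <a> per project
```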
| 2015-03-15T06:23:32Z | [] | [] |
|
pypi/warehouse | 432 | pypi__warehouse-432 | [
"420"
] | 6d1116176c611dff64cf30d31ce35ba270b5d6b0 | diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -15,13 +15,14 @@
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.response import FileIter, Response
from pyramid.view import view_config
+from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound
from warehouse.accounts.models import User
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
-from warehouse.packaging.models import Release, File, Role
+from warehouse.packaging.models import Release, File, Role, JournalEntry
@view_config(
@@ -137,12 +138,6 @@ def packages(request):
if path == file_.pgp_path and not file_.has_pgp_signature:
raise HTTPNotFound
- # We also need to get the X-PyPI-Last-Serial for the project associated
- # with this file. Bandersnatch (and other mirroring clients) will use this
- # to determine what kind of action to take if the MD5 hash does not match
- # what it expected.
- # TODO: Get the X-PyPI-Last-Serial number for this.
-
# Try to open the file, streaming if possible, and if this file doesn't
# exist then we'll return a 404 error. However we'll log an error because
# if the database thinks we have a file, then a file should exist here.
@@ -164,7 +159,7 @@ def packages(request):
if path == file_.path:
content_length = file_.size
- return Response(
+ resp = Response(
# If we have a wsgi.file_wrapper, we'll want to use that so that, if
# possible, this will use an optimized method of sending. Otherwise
# we'll just use Pyramid's FileIter as a fallback.
@@ -190,3 +185,16 @@ def packages(request):
# they handle downloading this response.
content_length=content_length,
)
+
+ # We also need to get the X-PyPI-Last-Serial for the project associated
+ # with this file. Bandersnatch (and other mirroring clients) will use this
+ # to determine what kind of action to take if the MD5 hash does not match
+ # what it expected.
+ serial = (
+ request.db.query(func.max(JournalEntry.id))
+ .filter(JournalEntry.name == file_.name)
+ .scalar()
+ )
+ resp.headers["X-PyPI-Last-Serial"] = serial or 0
+
+ return resp
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -25,6 +25,7 @@
from ..common.db.accounts import UserFactory
from ..common.db.packaging import (
ProjectFactory, ReleaseFactory, FileFactory, RoleFactory,
+ JournalEntryFactory,
)
@@ -241,6 +242,12 @@ def test_serves_package_file(self, db_request, pyramid_config):
filename="{}-{}.tar.gz".format(project.name, release.version),
python_version="source",
)
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db,
+ name=project.name,
+ submitted_by=user.username,
+ )
path = "source/{}/{}/{}".format(
project.name[0], project.name, file_.filename
@@ -263,6 +270,7 @@ def test_serves_package_file(self, db_request, pyramid_config):
assert resp.etag == file_.md5_digest
assert resp.last_modified == last_modified
assert resp.content_length == 27
+ assert resp.headers["X-PyPI-Last-Serial"] == je.id
# This needs to be last, as accessing resp.body sets the content_length
assert resp.body == b"some data for the fake file"
@@ -279,6 +287,12 @@ def test_serves_signature_file(self, db_request, pyramid_config):
filename="{}-{}.tar.gz".format(project.name, release.version),
python_version="source",
)
+ user = UserFactory.create(session=db_request.db)
+ je = JournalEntryFactory.create(
+ session=db_request.db,
+ name=project.name,
+ submitted_by=user.username,
+ )
path = "source/{}/{}/{}.asc".format(
project.name[0], project.name, file_.filename
@@ -301,5 +315,6 @@ def test_serves_signature_file(self, db_request, pyramid_config):
assert resp.etag == file_.md5_digest
assert resp.last_modified == last_modified
assert resp.content_length is None
+ assert resp.headers["X-PyPI-Last-Serial"] == je.id
# This needs to be last, as accessing resp.body sets the content_length
assert resp.body == b"some data for the fake file"
| Implement X-PyPI-Last-Serial for the File View
The view that serves files doesn't set the `X-PyPI-Last-Serial` header; however, Bandersnatch needs that header, so we need to implement it.
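A rough sketch of how a mirroring client might read the header (the file URL is hypothetical; only the header handling is the point):

```python
import urllib.request

# Hypothetical package URL served by the file view.
url = "https://warehouse.example/packages/source/f/foo/foo-1.0.tar.gz"
with urllib.request.urlopen(url) as resp:
    data = resp.read()
    last_serial = int(resp.headers.get("X-PyPI-Last-Serial", 0))
# A mirror can use last_serial to decide whether a checksum mismatch means
# stale mirror state or a corrupted download.
```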
| 2015-03-15T06:49:30Z | [] | [] |
|
pypi/warehouse | 434 | pypi__warehouse-434 | [
"395"
] | 2a82eaf8f98b159514ee8aafd9a4adee83bd5c6c | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -124,6 +124,10 @@ def configure(settings=None):
})
config.add_tween("warehouse.config.content_security_policy_tween_factory")
+ # If a route matches with a slash appended to it, redirect to that route
+ # instead of returning a HTTPNotFound.
+ config.add_notfound_view(append_slash=True)
+
# Configure the filesystems we use.
config.registry["filesystems"] = {}
for key, path in {
| diff --git a/tests/test_config.py b/tests/test_config.py
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -96,6 +96,7 @@ def __init__(self):
lambda d: configurator_settings.update(d)
),
add_tween=pretend.call_recorder(lambda tween_factory: None),
+ add_notfound_view=pretend.call_recorder(lambda append_slash: None),
add_static_view=pretend.call_recorder(
lambda name, path, cachebust: None
),
@@ -191,3 +192,6 @@ def __init__(self):
assert opener.calls == [
pretend.call("/srv/data/pypi/packages/", create_dir=True),
]
+ assert configurator_obj.add_notfound_view.calls == [
+ pretend.call(append_slash=True),
+ ]
| Redirect a slash-less URL to the slashed variant
We have URLs like `/project/foobar/`; if someone enters `/project/foobar`, we should redirect that to `/project/foobar/`.
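A minimal sketch of the Pyramid mechanism used for this (the `append_slash` argument to `add_notfound_view` is standard Pyramid; the surrounding setup is illustrative):

```python
from pyramid.config import Configurator

config = Configurator()
# When no route matches "/project/foobar" but "/project/foobar/" would,
# Pyramid's notfound view issues a redirect to the slashed URL instead
# of returning a 404.
config.add_notfound_view(append_slash=True)
```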
| 2015-03-15T08:05:49Z | [] | [] |
|
pypi/warehouse | 436 | pypi__warehouse-436 | [
"405",
"437"
] | 99b9b45999d242941d08533fdab743cbea92ed83 | diff --git a/warehouse/db.py b/warehouse/db.py
--- a/warehouse/db.py
+++ b/warehouse/db.py
@@ -82,6 +82,7 @@ def includeme(config):
# Create our SQLAlchemy Engine.
config.registry["sqlalchemy.engine"] = sqlalchemy.create_engine(
config.registry.settings["database.url"],
+ isolation_level="SERIALIZABLE",
)
# Register our request.db property
| diff --git a/tests/test_db.py b/tests/test_db.py
--- a/tests/test_db.py
+++ b/tests/test_db.py
@@ -82,7 +82,7 @@ class FakeRegistry(dict):
settings = {"database.url": pretend.stub()}
engine = pretend.stub()
- create_engine = pretend.call_recorder(lambda url: engine)
+ create_engine = pretend.call_recorder(lambda url, isolation_level: engine)
config = pretend.stub(
add_directive=pretend.call_recorder(lambda *a: None),
registry=FakeRegistry(),
@@ -97,7 +97,10 @@ class FakeRegistry(dict):
pretend.call("alembic_config", _configure_alembic),
]
assert create_engine.calls == [
- pretend.call(config.registry.settings["database.url"]),
+ pretend.call(
+ config.registry.settings["database.url"],
+ isolation_level="SERIALIZABLE",
+ ),
]
assert config.registry["sqlalchemy.engine"] is engine
assert config.add_request_method.calls == [
| Prevent Race Conditions by Utilizing Transaction Isolation
We need to ensure that we don't get non-repeatable reads or phantom reads on some of the API endpoints. In particular, for anything that Bandersnatch is going to use that has a serial number, we need to make sure that nothing changes between the query that gets the serial number and the quer(y|ies) that get the actual data.
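A minimal sketch of the approach taken here, assuming SQLAlchemy's `create_engine` and a hypothetical DSN: with `SERIALIZABLE` isolation, the serial query and the data queries issued inside one transaction see a single consistent snapshot.

```python
import sqlalchemy

# Hypothetical connection URL; the isolation_level argument is the point.
engine = sqlalchemy.create_engine(
    "postgresql://localhost/warehouse",
    isolation_level="SERIALIZABLE",
)
```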
Implement a read only route predicate
A possible solution to #405, see that issue for discussion.
Fixes #405
Closes #436
| 2015-03-15T15:20:39Z | [] | [] |
|
pypi/warehouse | 439 | pypi__warehouse-439 | [
"393"
] | 99b9b45999d242941d08533fdab743cbea92ed83 | diff --git a/warehouse/cache/http.py b/warehouse/cache/http.py
--- a/warehouse/cache/http.py
+++ b/warehouse/cache/http.py
@@ -75,8 +75,13 @@ def conditional_http_tween(request):
response.conditional_response = True
elif (isinstance(response.app_iter, collections.abc.Sequence) and
len(response.app_iter) == 1):
- response.conditional_response = True
- response.md5_etag()
+ # We can only reasonably implement automatic ETags on 200 responses
+ # to GET or HEAD requests. The subtleties of doing it in other cases
+ # are too hard to get right.
+ if (request.method in {"GET", "HEAD"} and
+ response.status_code == 200):
+ response.conditional_response = True
+ response.md5_etag()
return response
return conditional_http_tween
| diff --git a/tests/cache/test_http.py b/tests/cache/test_http.py
--- a/tests/cache/test_http.py
+++ b/tests/cache/test_http.py
@@ -165,16 +165,18 @@ def test_explicit_etag(self):
assert handler.calls == [pretend.call(request)]
assert response.conditional_response
- def test_implicit_etag(self):
+ @pytest.mark.parametrize("method", ["GET", "HEAD"])
+ def test_implicit_etag(self, method):
response = pretend.stub(
last_modified=None,
etag=None,
conditional_response=False,
md5_etag=pretend.call_recorder(lambda: None),
app_iter=[b"foo"],
+ status_code=200,
)
handler = pretend.call_recorder(lambda request: response)
- request = pretend.stub()
+ request = pretend.stub(method=method)
tween = conditional_http_tween_factory(handler, pretend.stub())
@@ -183,6 +185,46 @@ def test_implicit_etag(self):
assert response.conditional_response
assert response.md5_etag.calls == [pretend.call()]
+ @pytest.mark.parametrize("method", ["GET", "HEAD"])
+ def test_no_implicit_etag_no_200(self, method):
+ response = pretend.stub(
+ last_modified=None,
+ etag=None,
+ conditional_response=False,
+ md5_etag=pretend.call_recorder(lambda: None),
+ app_iter=[b"foo"],
+ status_code=201,
+ )
+ handler = pretend.call_recorder(lambda request: response)
+ request = pretend.stub(method=method)
+
+ tween = conditional_http_tween_factory(handler, pretend.stub())
+
+ assert tween(request) is response
+ assert handler.calls == [pretend.call(request)]
+ assert not response.conditional_response
+ assert response.md5_etag.calls == []
+
+ @pytest.mark.parametrize("method", ["POST", "PUT"])
+ def test_no_implicit_etag_wrong_method(self, method):
+ response = pretend.stub(
+ last_modified=None,
+ etag=None,
+ conditional_response=False,
+ md5_etag=pretend.call_recorder(lambda: None),
+ app_iter=[b"foo"],
+ status_code=200,
+ )
+ handler = pretend.call_recorder(lambda request: response)
+ request = pretend.stub(method=method)
+
+ tween = conditional_http_tween_factory(handler, pretend.stub())
+
+ assert tween(request) is response
+ assert handler.calls == [pretend.call(request)]
+ assert not response.conditional_response
+ assert response.md5_etag.calls == []
+
def test_no_etag(self):
response = pretend.stub(
last_modified=None,
| Double check conditional HTTP implementation
The conditional HTTP implementation doesn't check the status code of the response at all. Determine whether it should, and if so, update it to do the right thing.
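A sketch of the rule the fix above enforces: automatic ETags are only safe for 200 responses to GET or HEAD (illustrative helper, not the actual tween).

```python
def maybe_make_conditional(request, response):
    # Hashing the body into an ETag only makes sense when the body is the
    # canonical representation, i.e. a 200 response to GET or HEAD.
    if request.method in {"GET", "HEAD"} and response.status_code == 200:
        response.md5_etag()                  # derive the ETag from the body
        response.conditional_response = True
    return response
```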
| 2015-03-15T17:01:27Z | [] | [] |
|
pypi/warehouse | 440 | pypi__warehouse-440 | [
"392"
] | 2bff691ec5efc3c72daff217abca8c0a363211c6 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -103,6 +103,9 @@ def configure(settings=None):
# Allow the packaging app to register any services it has.
config.include(".packaging")
+ # Configure redirection support
+ config.include(".redirects")
+
# Register all our URL routes for Warehouse.
config.include(".routes")
diff --git a/warehouse/redirects.py b/warehouse/redirects.py
new file mode 100644
--- /dev/null
+++ b/warehouse/redirects.py
@@ -0,0 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyramid.httpexceptions import HTTPMovedPermanently
+
+
+def redirect_view_factory(target, redirect=HTTPMovedPermanently):
+ def redirect_view(request):
+ return redirect(target.format(_request=request, **request.matchdict))
+ return redirect_view
+
+
+def add_redirect(config, source, target, **kw):
+ route_name = "warehouse.redirects." + source
+
+ config.add_route(route_name, source)
+ config.add_view(redirect_view_factory(target, **kw), route_name=route_name)
+
+
+def includeme(config):
+ config.add_directive("add_redirect", add_redirect, action_wrap=False)
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -45,3 +45,10 @@ def includeme(config):
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/",
)
+
+ # Legacy Redirects
+ config.add_redirect("/pypi/{name}/", "/project/{name}/")
+ config.add_redirect(
+ "/pypi/{name}/{version}/",
+ "/project/{name}/{version}/",
+ )
| diff --git a/tests/test_config.py b/tests/test_config.py
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -143,6 +143,7 @@ def __init__(self):
pretend.call(".csrf"),
pretend.call(".accounts"),
pretend.call(".packaging"),
+ pretend.call(".redirects"),
pretend.call(".routes"),
]
assert configurator_obj.add_jinja2_renderer.calls == [
diff --git a/tests/test_redirects.py b/tests/test_redirects.py
new file mode 100644
--- /dev/null
+++ b/tests/test_redirects.py
@@ -0,0 +1,67 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from pyramid.httpexceptions import HTTPMovedPermanently
+
+from warehouse import redirects
+
+
+def test_redirect_view():
+ target = "/{wat}/{_request.method}"
+ view = redirects.redirect_view_factory(target)
+
+ request = pretend.stub(method="GET", matchdict={"wat": "the-thing"})
+ resp = view(request)
+
+ assert isinstance(resp, HTTPMovedPermanently)
+ assert resp.headers["Location"] == "/the-thing/GET"
+
+
+def test_add_redirect(monkeypatch):
+ rview = pretend.stub()
+ rview_factory = pretend.call_recorder(lambda target, redirect: rview)
+ monkeypatch.setattr(redirects, "redirect_view_factory", rview_factory)
+
+ config = pretend.stub(
+ add_route=pretend.call_recorder(lambda name, route: None),
+ add_view=pretend.call_recorder(lambda view, route_name: None),
+ )
+
+ source = "/the/{thing}/"
+ target = "/other/{thing}/"
+ redirect = pretend.stub()
+
+ redirects.add_redirect(config, source, target, redirect=redirect)
+
+ assert config.add_route.calls == [
+ pretend.call("warehouse.redirects." + source, source),
+ ]
+ assert config.add_view.calls == [
+ pretend.call(rview, route_name="warehouse.redirects." + source),
+ ]
+ assert rview_factory.calls == [pretend.call(target, redirect=redirect)]
+
+
+def test_includeme():
+ config = pretend.stub(
+ add_directive=pretend.call_recorder(lambda n, fn, action_wrap: None),
+ )
+ redirects.includeme(config)
+ assert config.add_directive.calls == [
+ pretend.call(
+ "add_redirect",
+ redirects.add_redirect,
+ action_wrap=False,
+ ),
+ ]
diff --git a/tests/test_routes.py b/tests/test_routes.py
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -22,6 +22,11 @@ class FakeConfig:
def add_route(*args, **kwargs):
pass
+ @staticmethod
+ @pretend.call_recorder
+ def add_redirect(*args, **kwargs):
+ pass
+
config = FakeConfig()
includeme(config)
@@ -55,3 +60,8 @@ def add_route(*args, **kwargs):
traverse="/{name}/",
),
]
+
+ assert config.add_redirect.calls == [
+ pretend.call("/pypi/{name}/", "/project/{name}/"),
+ pretend.call("/pypi/{name}/{version}/", "/project/{name}/{version}/"),
+ ]
| Implement redirects from the old PyPI URLs to the new Project URLs
| 2015-03-15T18:41:18Z | [] | [] |
|
pypi/warehouse | 442 | pypi__warehouse-442 | [
"433"
] | 3b43c6b3d52de1c2420787e8507f231b4761bda3 | diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -15,14 +15,13 @@
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.response import FileIter, Response
from pyramid.view import view_config
-from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound
from warehouse.accounts.models import User
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
-from warehouse.packaging.models import Release, File, Role, JournalEntry
+from warehouse.packaging.models import Release, File, Role
@view_config(
@@ -113,9 +112,7 @@ def packages(request):
# The amount of logic that we can do in this view is very limited, this
# view needs to be able to be handled by Fastly directly hitting S3 instead
# of actually hitting this view. This more or less means that we're limited
- # to just setting headers and serving the actual file. In addition the
- # headers that we can set, have to be able to be determined at file upload
- # time instead of dynamically.
+ # to just serving the actual file.
# Grab the path of the file that we're attempting to serve
path = request.matchdict["path"]
@@ -159,7 +156,7 @@ def packages(request):
if path == file_.path:
content_length = file_.size
- resp = Response(
+ return Response(
# If we have a wsgi.file_wrapper, we'll want to use that so that, if
# possible, this will use an optimized method of sending. Otherwise
# we'll just use Pyramid's FileIter as a fallback.
@@ -185,16 +182,3 @@ def packages(request):
# they handle downloading this response.
content_length=content_length,
)
-
- # We also need to get the X-PyPI-Last-Serial for the project associated
- # with this file. Bandersnatch (and other mirroring clients) will use this
- # to determine what kind of action to take if the MD5 hash does not match
- # what it expected.
- serial = (
- request.db.query(func.max(JournalEntry.id))
- .filter(JournalEntry.name == file_.name)
- .scalar()
- )
- resp.headers["X-PyPI-Last-Serial"] = serial or 0
-
- return resp
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -25,7 +25,6 @@
from ..common.db.accounts import UserFactory
from ..common.db.packaging import (
ProjectFactory, ReleaseFactory, FileFactory, RoleFactory,
- JournalEntryFactory,
)
@@ -242,12 +241,6 @@ def test_serves_package_file(self, db_request, pyramid_config):
filename="{}-{}.tar.gz".format(project.name, release.version),
python_version="source",
)
- user = UserFactory.create(session=db_request.db)
- je = JournalEntryFactory.create(
- session=db_request.db,
- name=project.name,
- submitted_by=user.username,
- )
path = "source/{}/{}/{}".format(
project.name[0], project.name, file_.filename
@@ -270,7 +263,6 @@ def test_serves_package_file(self, db_request, pyramid_config):
assert resp.etag == file_.md5_digest
assert resp.last_modified == last_modified
assert resp.content_length == 27
- assert resp.headers["X-PyPI-Last-Serial"] == je.id
# This needs to be last, as accessing resp.body sets the content_length
assert resp.body == b"some data for the fake file"
@@ -287,12 +279,6 @@ def test_serves_signature_file(self, db_request, pyramid_config):
filename="{}-{}.tar.gz".format(project.name, release.version),
python_version="source",
)
- user = UserFactory.create(session=db_request.db)
- je = JournalEntryFactory.create(
- session=db_request.db,
- name=project.name,
- submitted_by=user.username,
- )
path = "source/{}/{}/{}.asc".format(
project.name[0], project.name, file_.filename
@@ -315,6 +301,5 @@ def test_serves_signature_file(self, db_request, pyramid_config):
assert resp.etag == file_.md5_digest
assert resp.last_modified == last_modified
assert resp.content_length is None
- assert resp.headers["X-PyPI-Last-Serial"] == je.id
# This needs to be last, as accessing resp.body sets the content_length
assert resp.body == b"some data for the fake file"
| Deprecate & Remove X-PyPI-Last-Serial on File Responses
Ideally we'd like to not serve the file responses via the Warehouse web application; however, we currently need to do this in order to add the `X-PyPI-Last-Serial` responses. Bandersnatch is currently using this, so it would be great to investigate what it's being used for and whether we can safely deprecate and remove it.
| This isn't just _used by_ Bandersnatch, it was added specifically _for_
Bandersnatch :)
Yea, I just wasn't sure if anyone else was using it or not.
Ok, I've talked to @ctheune and Bandersnatch is going to remove the need for this. Changes to PyPI (disallowing delete + reupload as a general case) remove the primary need for this since the contents located at a file URL are likely never going to change except to possibly be deleted.
| 2015-03-16T14:56:30Z | [] | [] |
pypi/warehouse | 446 | pypi__warehouse-446 | [
"243",
"333"
] | 0c30780241ffa5fe365384c868660643e343448b | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -13,10 +13,10 @@
import fs.opener
import transaction
+from pyramid import renderers
from pyramid.config import Configurator
from pyramid.httpexceptions import HTTPMovedPermanently
from tzf.pyramid_yml import config_defaults
-
from warehouse.utils.static import WarehouseCacheBuster
@@ -71,6 +71,13 @@ def configure(settings=None):
# so we'll go ahead and add that to the Jinja2 search path.
config.add_jinja2_search_path("warehouse:templates", name=".html")
+ # We want to configure our JSON renderer to sort the keys, and also to use
+ # an ultra compact serialization format.
+ config.add_renderer(
+ "json",
+ renderers.JSON(sort_keys=True, separators=(",", ":")),
+ )
+
# Configure our transaction handling so that each request gets it's own
# transaction handler and the lifetime of the transaction is tied to the
# lifetime of the request.
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
new file mode 100644
--- /dev/null
+++ b/warehouse/legacy/api/json.py
@@ -0,0 +1,152 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
+from pyramid.view import view_config
+from sqlalchemy import func
+from sqlalchemy.orm.exc import NoResultFound
+
+from warehouse.cache.http import cache_control
+from warehouse.cache.origin import origin_cache
+from warehouse.packaging.interfaces import IDownloadStatService
+from warehouse.packaging.models import File, Release, JournalEntry
+
+
+@view_config(
+ route_name="legacy.api.json.project",
+ renderer="json",
+ decorator=[
+ cache_control(1 * 24 * 60 * 60), # 1 day
+ origin_cache(7 * 24 * 60 * 60), # 7 days
+ ],
+)
+def json_project(project, request):
+ if project.name != request.matchdict.get("name", project.name):
+ return HTTPMovedPermanently(
+ request.current_route_url(name=project.name),
+ )
+
+ try:
+ release = project.releases.order_by(
+ Release._pypi_ordering.desc()
+ ).limit(1).one()
+ except NoResultFound:
+ raise HTTPNotFound from None
+
+ return json_release(release, request)
+
+
+@view_config(
+ route_name="legacy.api.json.release",
+ renderer="json",
+ decorator=[
+ cache_control(7 * 24 * 60 * 60), # 7 days
+ origin_cache(30 * 24 * 60 * 60), # 30 days
+ ],
+)
+def json_release(release, request):
+ project = release.project
+
+ if project.name != request.matchdict.get("name", project.name):
+ return HTTPMovedPermanently(
+ request.current_route_url(name=project.name),
+ )
+
+ # We want to allow CORS here to enable anyone to fetch data from this API
+ request.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ # Get the latest serial number for this project.
+ serial = (
+ request.db.query(func.max(JournalEntry.id))
+ .filter(JournalEntry.name == project.name)
+ .scalar()
+ )
+ request.response.headers["X-PyPI-Last-Serial"] = serial or 0
+
+ # Get all of the releases and files for this project.
+ release_files = (
+ request.db.query(Release, File)
+ .outerjoin(File)
+ .filter(Release.project == project)
+ .order_by(Release._pypi_ordering.desc(), File.filename)
+ .all()
+ )
+
+ # Map our releases + files into a dictionary that maps each release to a
+ # list of all its files.
+ releases = {}
+ for r, file_ in release_files:
+ files = releases.setdefault(r, [])
+ if file_ is not None:
+ files.append(file_)
+
+ # Serialize our database objects to match the way that PyPI legacy
+ # presented this data.
+ releases = {
+ r.version: [
+ {
+ "filename": f.filename,
+ "packagetype": f.packagetype,
+ "python_version": f.python_version,
+ "has_sig": f.has_pgp_signature,
+ "comment_text": f.comment_text,
+ "md5_digest": f.md5_digest,
+ "size": f.size,
+ "downloads": f.downloads,
+ "upload_time": f.upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
+ "url": request.route_url("packaging.file", path=f.path),
+ }
+ for f in fs
+ ]
+ for r, fs in releases.items()
+ }
+
+ # Get our stats service
+ stats_svc = request.find_service(IDownloadStatService)
+
+ return {
+ "info": {
+ "name": project.name,
+ "version": release.version,
+ "summary": release.summary,
+ "description": release.description,
+ "keywords": release.keywords,
+ "license": release.license,
+ "classifiers": list(release.classifiers),
+ "author": release.author,
+ "author_email": release.author_email,
+ "maintainer": release.maintainer,
+ "maintainer_email": release.maintainer_email,
+ "requires_python": release.requires_python,
+ "platform": release.platform,
+ "downloads": {
+ "last_day": stats_svc.get_daily_stats(project.name),
+ "last_week": stats_svc.get_weekly_stats(project.name),
+ "last_month": stats_svc.get_monthly_stats(project.name),
+ },
+ "project_url": request.route_url(
+ "packaging.project",
+ name=project.name,
+ ),
+ "release_url": request.route_url(
+ "packaging.release",
+ name=project.name,
+ version=release.version,
+ ),
+ "docs_url": project.documentation_url,
+ "bugtrack_url": project.bugtrack_url,
+ "home_page": release.home_page,
+ "download_url": release.download_url,
+ },
+ "urls": releases[release.version],
+ "releases": releases,
+ }
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -13,13 +13,14 @@
import fs.errors
from citext import CIText
-from pyramid.threadlocal import get_current_registry
+from pyramid.threadlocal import get_current_registry, get_current_request
from sqlalchemy import (
CheckConstraint, Column, Enum, ForeignKey, ForeignKeyConstraint, Index,
Boolean, DateTime, Integer, Table, Text,
)
from sqlalchemy import func, orm, sql
from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.hybrid import hybrid_property
@@ -107,6 +108,21 @@ def __getitem__(self, version):
except NoResultFound:
raise KeyError from None
+ @property
+ def documentation_url(self):
+ # TODO: Move this into the database and eliminate the use of the
+ # threadlocal here.
+ registry = get_current_registry()
+ request = get_current_request()
+
+ path = "/".join([self.name, "index.html"])
+
+ # If the path doesn't exist, then we'll just return a None here.
+ if not registry["filesystems"]["documentation"].exists(path):
+ return
+
+ return request.route_url("legacy.docs", project=self.name)
+
class Release(db.ModelBase):
@@ -163,12 +179,13 @@ def __table_args__(cls): # noqa
server_default=sql.func.now(),
)
- classifiers = orm.relationship(
+ _classifiers = orm.relationship(
Classifier,
backref="project_releases",
secondary=lambda: release_classifiers,
order_by=Classifier.classifier,
)
+ classifiers = association_proxy("_classifiers", "classifier")
files = orm.relationship(
"File",
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -45,6 +45,21 @@ def includeme(config):
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/",
)
+ config.add_route(
+ "legacy.api.json.project",
+ "/pypi/{name}/json",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}",
+ )
+ config.add_route(
+ "legacy.api.json.release",
+ "/pypi/{name}/{version}/json",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}/{version}",
+ )
+
+ # Legacy Documentation
+ config.add_route("legacy.docs", "https://pythonhosted.org/{project}/")
# Legacy Redirects
config.add_redirect("/pypi/{name}/", "/project/{name}/")
| diff --git a/tests/legacy/api/test_json.py b/tests/legacy/api/test_json.py
new file mode 100644
--- /dev/null
+++ b/tests/legacy/api/test_json.py
@@ -0,0 +1,230 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+import pytest
+
+from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
+
+from warehouse.legacy.api import json
+
+from ...common.db.packaging import ProjectFactory, ReleaseFactory, FileFactory
+
+
+class TestJSONProject:
+
+ def test_normalizing_redirects(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+
+ name = project.name.lower()
+ if name == project.name:
+ name = project.name.upper()
+
+ db_request.matchdict = {"name": name}
+ db_request.current_route_url = pretend.call_recorder(
+ lambda name: "/project/the-redirect/"
+ )
+
+ resp = json.json_project(project, db_request)
+
+ assert isinstance(resp, HTTPMovedPermanently)
+ assert resp.headers["Location"] == "/project/the-redirect/"
+ assert db_request.current_route_url.calls == [
+ pretend.call(name=project.name),
+ ]
+
+ def test_missing_release(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+
+ with pytest.raises(HTTPNotFound):
+ json.json_project(project, db_request)
+
+ def test_calls_release_detail(self, monkeypatch, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+
+ ReleaseFactory.create(
+ session=db_request.db, project=project, version="1.0",
+ )
+ ReleaseFactory.create(
+ session=db_request.db, project=project, version="2.0",
+ )
+
+ release = ReleaseFactory.create(
+ session=db_request.db, project=project, version="3.0",
+ )
+
+ response = pretend.stub()
+ json_release = pretend.call_recorder(lambda ctx, request: response)
+ monkeypatch.setattr(json, "json_release", json_release)
+
+ resp = json.json_project(project, db_request)
+
+ assert resp is response
+ assert json_release.calls == [pretend.call(release, db_request)]
+
+
+class TestJSONRelease:
+
+ def test_normalizing_redirects(self, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ release = ReleaseFactory.create(
+ session=db_request.db, project=project, version="3.0",
+ )
+
+ name = release.project.name.lower()
+ if name == release.project.name:
+ name = release.project.name.upper()
+
+ db_request.matchdict = {"name": name}
+ db_request.current_route_url = pretend.call_recorder(
+ lambda name: "/project/the-redirect/3.0/"
+ )
+
+ resp = json.json_release(release, db_request)
+
+ assert isinstance(resp, HTTPMovedPermanently)
+ assert resp.headers["Location"] == "/project/the-redirect/3.0/"
+ assert db_request.current_route_url.calls == [
+ pretend.call(name=release.project.name),
+ ]
+
+ def test_detail_renders(self, pyramid_config, db_request):
+ project = ProjectFactory.create(session=db_request.db)
+ releases = [
+ ReleaseFactory.create(
+ session=db_request.db, project=project, version=v,
+ )
+ for v in ["1.0", "2.0", "3.0"]
+ ]
+ files = [
+ FileFactory.create(
+ session=db_request.db,
+ release=r,
+ filename="{}-{}.tar.gz".format(project.name, r.version),
+ python_version="source",
+ )
+ for r in releases[:-1]
+ ]
+
+ daily_stats = pretend.stub()
+ weekly_stats = pretend.stub()
+ monthly_stats = pretend.stub()
+
+ db_request.find_service = lambda x: pretend.stub(
+ get_daily_stats=lambda p: daily_stats,
+ get_weekly_stats=lambda p: weekly_stats,
+ get_monthly_stats=lambda p: monthly_stats,
+ )
+
+ url = "/the/fake/url/"
+ db_request.route_url = pretend.call_recorder(lambda *args, **kw: url)
+
+ pyramid_config.registry["filesystems"] = {
+ "packages": pretend.stub(
+ exists=lambda x: True,
+ getsize=lambda x: 200,
+ ),
+ "documentation": pretend.stub(exists=lambda x: True),
+ }
+
+ result = json.json_release(releases[1], db_request)
+
+ assert set(db_request.route_url.calls) == {
+ pretend.call("packaging.file", path=files[0].path),
+ pretend.call("packaging.file", path=files[1].path),
+ pretend.call("packaging.project", name=project.name),
+ pretend.call(
+ "packaging.release",
+ name=project.name,
+ version=releases[1].version,
+ ),
+ pretend.call("legacy.docs", project=project.name),
+ }
+ assert result == {
+ "info": {
+ "author": None,
+ "author_email": None,
+ "bugtrack_url": None,
+ "classifiers": [],
+ "description": None,
+ "docs_url": "/the/fake/url/",
+ "download_url": None,
+ "downloads": {
+ "last_day": daily_stats,
+ "last_week": weekly_stats,
+ "last_month": monthly_stats,
+ },
+ "home_page": None,
+ "keywords": None,
+ "license": None,
+ "maintainer": None,
+ "maintainer_email": None,
+ "name": project.name,
+ "platform": None,
+ "project_url": "/the/fake/url/",
+ "release_url": "/the/fake/url/",
+ "requires_python": None,
+ "summary": None,
+ "version": "2.0",
+ },
+ "releases": {
+ "1.0": [
+ {
+ "comment_text": None,
+ "downloads": 0,
+ "filename": files[0].filename,
+ "has_sig": True,
+ "md5_digest": files[0].md5_digest,
+ "packagetype": None,
+ "python_version": "source",
+ "size": 200,
+ "upload_time": files[0].upload_time.strftime(
+ "%Y-%m-%dT%H:%M:%S",
+ ),
+ "url": "/the/fake/url/",
+ },
+ ],
+ "2.0": [
+ {
+ "comment_text": None,
+ "downloads": 0,
+ "filename": files[1].filename,
+ "has_sig": True,
+ "md5_digest": files[1].md5_digest,
+ "packagetype": None,
+ "python_version": "source",
+ "size": 200,
+ "upload_time": files[1].upload_time.strftime(
+ "%Y-%m-%dT%H:%M:%S",
+ ),
+ "url": "/the/fake/url/",
+ },
+ ],
+ "3.0": [],
+ },
+ "urls": [
+ {
+ "comment_text": None,
+ "downloads": 0,
+ "filename": files[1].filename,
+ "has_sig": True,
+ "md5_digest": files[1].md5_digest,
+ "packagetype": None,
+ "python_version": "source",
+ "size": 200,
+ "upload_time": files[1].upload_time.strftime(
+ "%Y-%m-%dT%H:%M:%S",
+ ),
+ "url": "/the/fake/url/",
+ },
+ ],
+ }
diff --git a/tests/packaging/test_models.py b/tests/packaging/test_models.py
--- a/tests/packaging/test_models.py
+++ b/tests/packaging/test_models.py
@@ -63,6 +63,45 @@ def test_traversal_cant_find(self, db_request):
with pytest.raises(KeyError):
project["1.0"]
+ def test_doc_url_doesnt_exist(self, pyramid_config, db_request):
+ @pretend.call_recorder
+ def exists(path):
+ return False
+
+ pyramid_config.registry["filesystems"] = {
+ "documentation": pretend.stub(exists=exists),
+ }
+
+ project = DBProjectFactory.create(session=db_request.db)
+
+ assert project.documentation_url is None
+ assert exists.calls == [
+ pretend.call("/".join([project.name, "index.html"])),
+ ]
+
+ def test_doc_url(self, pyramid_config, db_request):
+ @pretend.call_recorder
+ def exists(path):
+ return True
+
+ pyramid_config.registry["filesystems"] = {
+ "documentation": pretend.stub(exists=exists),
+ }
+
+ db_request.route_url = pretend.call_recorder(
+ lambda route, **kw: "/the/docs/url/"
+ )
+
+ project = DBProjectFactory.create(session=db_request.db)
+
+ assert project.documentation_url == "/the/docs/url/"
+ assert exists.calls == [
+ pretend.call("/".join([project.name, "index.html"])),
+ ]
+ assert db_request.route_url.calls == [
+ pretend.call("legacy.docs", project=project.name),
+ ]
+
class TestFile:
diff --git a/tests/test_config.py b/tests/test_config.py
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -16,6 +16,7 @@
import pretend
import pytest
+from pyramid import renderers
from pyramid.httpexceptions import HTTPMovedPermanently
from warehouse import config
@@ -79,6 +80,10 @@ def test_configure(monkeypatch, settings):
opener = pretend.call_recorder(lambda path, create_dir: fs_obj)
monkeypatch.setattr(fs.opener, "fsopendir", opener)
+ json_renderer_obj = pretend.stub()
+ json_renderer_cls = pretend.call_recorder(lambda **kw: json_renderer_obj)
+ monkeypatch.setattr(renderers, "JSON", json_renderer_cls)
+
class FakeRegistry(dict):
def __init__(self):
self.settings = {
@@ -91,6 +96,7 @@ def __init__(self):
configurator_obj = pretend.stub(
registry=FakeRegistry(),
include=pretend.call_recorder(lambda include: None),
+ add_renderer=pretend.call_recorder(lambda name, renderer: None),
add_jinja2_renderer=pretend.call_recorder(lambda renderer: None),
add_jinja2_search_path=pretend.call_recorder(lambda path, name: None),
get_settings=lambda: configurator_settings,
@@ -198,3 +204,10 @@ def __init__(self):
assert configurator_obj.add_notfound_view.calls == [
pretend.call(append_slash=HTTPMovedPermanently),
]
+ assert configurator_obj.add_renderer.calls == [
+ pretend.call("json", json_renderer_obj),
+ ]
+
+ assert json_renderer_cls.calls == [
+ pretend.call(sort_keys=True, separators=(",", ":")),
+ ]
diff --git a/tests/test_routes.py b/tests/test_routes.py
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -59,6 +59,19 @@ def add_redirect(*args, **kwargs):
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/",
),
+ pretend.call(
+ "legacy.api.json.project",
+ "/pypi/{name}/json",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}",
+ ),
+ pretend.call(
+ "legacy.api.json.release",
+ "/pypi/{name}/{version}/json",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}/{version}",
+ ),
+ pretend.call("legacy.docs", "https://pythonhosted.org/{project}/"),
]
assert config.add_redirect.calls == [
| Remove JSON-P
It would be nice to remove JSON-P eventually. It has some security implications that we attempt to work around using a filtering method to prevent insecure or dangerous methods of invocation. It would be nice to just remove the need for this all together and switch everything over to using CORS (introduced in #242). We will probably need some sort of a sunset period though.
Support CORS on JSON API
Say I want to show project stats like monthly downloads on the project page. Fetching this from client side JavaScript allows me to retain a completely static project page (can be hosted on a CDN).
However, the JSON API does not set `Access-Control-Allow-Origin` header - no CORS support.
Test:
https://github.com/oberstet/scratchbox/blob/master/js/badges/pypi.html
For comparison, this is what GitHub does with it's API:
https://github.com/oberstet/scratchbox/blob/master/js/badges/github.html
If above would work, that makes #330 void (for me) - if the API doesn't get rate limited or such ..
| 2015-03-19T08:06:55Z | [] | [] |
|
pypi/warehouse | 449 | pypi__warehouse-449 | [
"409"
] | 1f347d239a3b2b0ca85dc47291c6c62e159d9403 | diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -33,7 +33,7 @@
def profile(user, request):
if user.username != request.matchdict.get("username", user.username):
return HTTPMovedPermanently(
- request.current_route_url(username=user.username),
+ request.current_route_path(username=user.username),
)
return {"user": user}
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -32,7 +32,7 @@
def json_project(project, request):
if project.name != request.matchdict.get("name", project.name):
return HTTPMovedPermanently(
- request.current_route_url(name=project.name),
+ request.current_route_path(name=project.name),
)
try:
@@ -58,7 +58,7 @@ def json_release(release, request):
if project.name != request.matchdict.get("name", project.name):
return HTTPMovedPermanently(
- request.current_route_url(name=project.name),
+ request.current_route_path(name=project.name),
)
# We want to allow CORS here to enable anyone to fetch data from this API
diff --git a/warehouse/legacy/api/simple.py b/warehouse/legacy/api/simple.py
--- a/warehouse/legacy/api/simple.py
+++ b/warehouse/legacy/api/simple.py
@@ -57,7 +57,7 @@ def simple_detail(project, request):
if (project.normalized_name !=
request.matchdict.get("name", project.normalized_name)):
return HTTPMovedPermanently(
- request.current_route_url(name=project.normalized_name),
+ request.current_route_path(name=project.normalized_name),
)
# Get the latest serial number for this project.
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -35,7 +35,7 @@
def project_detail(project, request):
if project.name != request.matchdict.get("name", project.name):
return HTTPMovedPermanently(
- request.current_route_url(name=project.name),
+ request.current_route_path(name=project.name),
)
try:
@@ -61,7 +61,7 @@ def release_detail(release, request):
if project.name != request.matchdict.get("name", project.name):
return HTTPMovedPermanently(
- request.current_route_url(name=project.name),
+ request.current_route_path(name=project.name),
)
# Get all of the registered versions for this Project, in order of newest
| diff --git a/tests/accounts/test_views.py b/tests/accounts/test_views.py
--- a/tests/accounts/test_views.py
+++ b/tests/accounts/test_views.py
@@ -31,7 +31,7 @@ def test_user_redirects_username(self, db_request):
else:
username = user.username.lower()
- db_request.current_route_url = pretend.call_recorder(
+ db_request.current_route_path = pretend.call_recorder(
lambda username: "/user/the-redirect/"
)
db_request.matchdict = {"username": username}
@@ -40,7 +40,7 @@ def test_user_redirects_username(self, db_request):
assert isinstance(result, HTTPMovedPermanently)
assert result.headers["Location"] == "/user/the-redirect/"
- assert db_request.current_route_url.calls == [
+ assert db_request.current_route_path.calls == [
pretend.call(username=user.username),
]
diff --git a/tests/legacy/api/test_json.py b/tests/legacy/api/test_json.py
--- a/tests/legacy/api/test_json.py
+++ b/tests/legacy/api/test_json.py
@@ -33,7 +33,7 @@ def test_normalizing_redirects(self, db_request):
name = project.name.upper()
db_request.matchdict = {"name": name}
- db_request.current_route_url = pretend.call_recorder(
+ db_request.current_route_path = pretend.call_recorder(
lambda name: "/project/the-redirect/"
)
@@ -41,7 +41,7 @@ def test_normalizing_redirects(self, db_request):
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/project/the-redirect/"
- assert db_request.current_route_url.calls == [
+ assert db_request.current_route_path.calls == [
pretend.call(name=project.name),
]
@@ -88,7 +88,7 @@ def test_normalizing_redirects(self, db_request):
name = release.project.name.upper()
db_request.matchdict = {"name": name}
- db_request.current_route_url = pretend.call_recorder(
+ db_request.current_route_path = pretend.call_recorder(
lambda name: "/project/the-redirect/3.0/"
)
@@ -96,7 +96,7 @@ def test_normalizing_redirects(self, db_request):
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/project/the-redirect/3.0/"
- assert db_request.current_route_url.calls == [
+ assert db_request.current_route_path.calls == [
pretend.call(name=release.project.name),
]
diff --git a/tests/legacy/api/test_simple.py b/tests/legacy/api/test_simple.py
--- a/tests/legacy/api/test_simple.py
+++ b/tests/legacy/api/test_simple.py
@@ -70,7 +70,7 @@ def test_redirects(self, pyramid_request):
project = pretend.stub(normalized_name="foo")
pyramid_request.matchdict["name"] = "Foo"
- pyramid_request.current_route_url = pretend.call_recorder(
+ pyramid_request.current_route_path = pretend.call_recorder(
lambda name: "/foobar/"
)
@@ -78,7 +78,7 @@ def test_redirects(self, pyramid_request):
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/foobar/"
- assert pyramid_request.current_route_url.calls == [
+ assert pyramid_request.current_route_path.calls == [
pretend.call(name="foo"),
]
diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -38,7 +38,7 @@ def test_normalizing_redirects(self, db_request):
name = project.name.upper()
db_request.matchdict = {"name": name}
- db_request.current_route_url = pretend.call_recorder(
+ db_request.current_route_path = pretend.call_recorder(
lambda name: "/project/the-redirect/"
)
@@ -46,7 +46,7 @@ def test_normalizing_redirects(self, db_request):
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/project/the-redirect/"
- assert db_request.current_route_url.calls == [
+ assert db_request.current_route_path.calls == [
pretend.call(name=project.name),
]
@@ -93,7 +93,7 @@ def test_normalizing_redirects(self, db_request):
name = release.project.name.upper()
db_request.matchdict = {"name": name}
- db_request.current_route_url = pretend.call_recorder(
+ db_request.current_route_path = pretend.call_recorder(
lambda name: "/project/the-redirect/3.0/"
)
@@ -101,7 +101,7 @@ def test_normalizing_redirects(self, db_request):
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/project/the-redirect/3.0/"
- assert db_request.current_route_url.calls == [
+ assert db_request.current_route_path.calls == [
pretend.call(name=release.project.name),
]
| Use the path instead of url helpers
Pyramid has `request.*_url` helpers, which we are currently using; they bake the scheme and host name under which the site is being accessed into the generated link. We should use the `request.*_path` helpers instead, which produce explicit relative links.
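To make the difference concrete, here is a minimal sketch; the view and route names are purely illustrative and not taken from the changes above:

``` python
from pyramid.httpexceptions import HTTPMovedPermanently

def normalizing_redirect(request, project):
    # current_route_url() bakes scheme and host into the result, e.g.
    # "https://warehouse.example.com/project/foo/"; current_route_path()
    # yields just "/project/foo/", so the redirect stays relative to
    # however the site is being reached.
    return HTTPMovedPermanently(
        request.current_route_path(name=project.normalized_name)
    )
```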
| 2015-03-21T19:43:32Z | [] | [] |
|
pypi/warehouse | 451 | pypi__warehouse-451 | [
"410"
] | c0a009a90669e18eba73db217ed4a9214e3d0be1 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -87,6 +87,7 @@
"setproctitle",
"sqlalchemy>=0.9",
"sqlalchemy-citext",
+ "structlog",
"transaction",
"tzf.pyramid_yml",
"WTForms>=2.0.0",
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -51,6 +51,9 @@ def configure(settings=None):
# We want to load configuration from YAML files
config.include("tzf.pyramid_yml")
+ # Register our logging support
+ config.include(".logging")
+
# We'll want to use Jinja2 as our template system.
config.include("pyramid_jinja2")
diff --git a/warehouse/logging.py b/warehouse/logging.py
new file mode 100644
--- /dev/null
+++ b/warehouse/logging.py
@@ -0,0 +1,100 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging.config
+import uuid
+
+import structlog
+import structlog.stdlib
+
+
+RENDERER = structlog.processors.JSONRenderer()
+
+
+class StructlogFormatter(logging.Formatter):
+
+ def format(self, record):
+ # TODO: Figure out a better way of handling this besides just looking
+ # at the logger name, ideally this would have some way to
+ # really differentiate between log items which were logged by
+ # structlog and which were not.
+ if not record.name.startswith("warehouse."):
+ # TODO: Is there a better way to handle this? Maybe we can figure
+ # out a way to pass this through the structlog processors
+ # instead of manually duplicating the side effects here?
+ event_dict = {
+ "logger": record.name,
+ "level": record.levelname,
+ "event": record.msg,
+ }
+ record.msg = RENDERER(None, None, event_dict)
+
+ return super().format(record)
+
+
+def _create_id(request):
+ return str(uuid.uuid4())
+
+
+def _create_logger(request):
+ logger = structlog.get_logger("warehouse.request")
+
+ # This has to use **{} instead of just a kwarg because request.id is not
+ # an allowed kwarg name.
+ logger = logger.bind(**{"request.id": request.id})
+
+ return logger
+
+
+def includeme(config):
+ # Configure the standard library logging
+ logging.config.dictConfig({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "structlog": {
+ "()": "warehouse.logging.StructlogFormatter",
+ },
+ },
+ "handlers": {
+ "primary": {
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
+ "formatter": "structlog",
+ },
+ },
+ "root": {
+ "level": config.registry.settings.get("logging.level", "INFO"),
+ "handlers": ["primary"],
+ },
+ })
+
+ # Configure structlog
+ structlog.configure(
+ processors=[
+ structlog.stdlib.filter_by_level,
+ structlog.stdlib.add_logger_name,
+ structlog.stdlib.add_log_level,
+ structlog.stdlib.PositionalArgumentsFormatter(),
+ structlog.processors.StackInfoRenderer(),
+ structlog.processors.format_exc_info,
+ RENDERER,
+ ],
+ logger_factory=structlog.stdlib.LoggerFactory(),
+ wrapper_class=structlog.stdlib.BoundLogger,
+ )
+
+ # Give every request a unique identifer
+ config.add_request_method(_create_id, name="id", reify=True)
+
+ # Add a log method to every request.
+ config.add_request_method(_create_logger, name="log", reify=True)
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -145,8 +145,7 @@ def packages(request):
# We need to get S3FS so that it support rb- as well as r-.
f = request.registry["filesystems"]["packages"].open(path, mode="rb")
except fs.errors.ResourceNotFoundError:
- # TODO: Log an error here, this file doesn't exists for some reason,
- # but it should because the database thinks it should.
+ request.log.error("missing file data", path=path)
raise HTTPNotFound from None
# If the path we're accessing is the path for the package itself, as
| diff --git a/tests/packaging/test_views.py b/tests/packaging/test_views.py
--- a/tests/packaging/test_views.py
+++ b/tests/packaging/test_views.py
@@ -222,11 +222,17 @@ def opener(path, mode):
)
db_request.matchdict["path"] = path
+ db_request.log = pretend.stub(
+ error=pretend.call_recorder(lambda event, **kw: None),
+ )
with pytest.raises(HTTPNotFound):
views.packages(db_request)
assert opener.calls == [pretend.call(path, mode="rb")]
+ assert db_request.log.error.calls == [
+ pretend.call("missing file data", path=path),
+ ]
def test_serves_package_file(self, db_request, pyramid_config):
memfs = fs.memoryfs.MemoryFS()
diff --git a/tests/test_config.py b/tests/test_config.py
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -140,6 +140,7 @@ def __init__(self):
assert result is configurator_obj
assert configurator_obj.include.calls == [
pretend.call("tzf.pyramid_yml"),
+ pretend.call(".logging"),
pretend.call("pyramid_jinja2"),
pretend.call("pyramid_tm"),
pretend.call("pyramid_services"),
diff --git a/tests/test_logging.py b/tests/test_logging.py
new file mode 100644
--- /dev/null
+++ b/tests/test_logging.py
@@ -0,0 +1,152 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import logging.config
+import uuid
+
+from unittest import mock
+
+import pretend
+import pytest
+import structlog
+import structlog.stdlib
+
+from warehouse import logging as wlogging
+
+
+class TestStructlogFormatter:
+
+ def test_warehouse_logger_no_renderer(self):
+ formatter = wlogging.StructlogFormatter()
+ record = logging.LogRecord(
+ "warehouse.request",
+ logging.INFO,
+ None, None, "the message", None, None,
+ )
+
+ assert formatter.format(record) == "the message"
+
+ def test_non_warehouse_logger_renders(self):
+ formatter = wlogging.StructlogFormatter()
+ record = logging.LogRecord(
+ "another.logger",
+ logging.INFO,
+ None, None, "the message", None, None,
+ )
+
+ assert json.loads(formatter.format(record)) == {
+ "logger": "another.logger",
+ "level": "INFO",
+ "event": "the message",
+ }
+
+
+def test_create_id(monkeypatch):
+ uuid4 = pretend.call_recorder(lambda: "a fake uuid")
+ monkeypatch.setattr(uuid, "uuid4", uuid4)
+
+ request = pretend.stub()
+
+ assert wlogging._create_id(request) == "a fake uuid"
+
+
+def test_create_logging(monkeypatch):
+ bound_logger = pretend.stub()
+ logger = pretend.stub(
+ bind=pretend.call_recorder(lambda **kw: bound_logger),
+ )
+ get_logger = pretend.call_recorder(lambda name: logger)
+ monkeypatch.setattr(structlog, "get_logger", get_logger)
+
+ request = pretend.stub(id="request id")
+
+ assert wlogging._create_logger(request) is bound_logger
+ assert get_logger.calls == [pretend.call("warehouse.request")]
+ assert logger.bind.calls == [pretend.call(**{"request.id": "request id"})]
+
+
+@pytest.mark.parametrize(
+ ("settings", "expected_level"),
+ [
+ ({"logging.level": "DEBUG"}, "DEBUG"),
+ ({}, "INFO"),
+ ],
+)
+def test_includeme(monkeypatch, settings, expected_level):
+ dict_config = pretend.call_recorder(lambda c: None)
+ monkeypatch.setattr(logging.config, "dictConfig", dict_config)
+
+ configure = pretend.call_recorder(lambda **kw: None)
+ monkeypatch.setattr(structlog, "configure", configure)
+
+ config = pretend.stub(
+ registry=pretend.stub(settings=settings),
+ add_request_method=pretend.call_recorder(lambda fn, name, reify: None),
+ )
+
+ wlogging.includeme(config)
+
+ assert dict_config.calls == [
+ pretend.call({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "structlog": {
+ "()": "warehouse.logging.StructlogFormatter",
+ },
+ },
+ "handlers": {
+ "primary": {
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
+ "formatter": "structlog",
+ },
+ },
+ "root": {
+ "level": expected_level,
+ "handlers": ["primary"],
+ },
+ }),
+ ]
+ assert configure.calls == [
+ pretend.call(
+ processors=[
+ structlog.stdlib.filter_by_level,
+ structlog.stdlib.add_logger_name,
+ structlog.stdlib.add_log_level,
+ mock.ANY,
+ mock.ANY,
+ structlog.processors.format_exc_info,
+ wlogging.RENDERER,
+ ],
+ logger_factory=mock.ANY,
+ wrapper_class=structlog.stdlib.BoundLogger,
+ ),
+ ]
+ assert isinstance(
+ configure.calls[0].kwargs["processors"][3],
+ structlog.stdlib.PositionalArgumentsFormatter,
+ )
+ assert isinstance(
+ configure.calls[0].kwargs["processors"][4],
+ structlog.processors.StackInfoRenderer,
+ )
+ assert isinstance(
+ configure.calls[0].kwargs["logger_factory"],
+ structlog.stdlib.LoggerFactory,
+ )
+ assert config.add_request_method.calls == [
+ pretend.call(wlogging._create_id, name="id", reify=True),
+ pretend.call(wlogging._create_logger, name="log", reify=True),
+ ]
| Implement Logging
We need a good logging setup in Warehouse, ideally one based on [structlog](https://structlog.readthedocs.org/).
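As a rough, self-contained sketch of the structlog side (the processor choice and bound values here are illustrative; the patch above wires this into stdlib `logging.config` with a custom formatter):

``` python
import structlog

structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.JSONRenderer(),
    ],
)

# Each call emits one structured event; bound key/value pairs travel with it.
logger = structlog.get_logger("warehouse.request")
logger = logger.bind(**{"request.id": "some-uuid"})
logger.info("missing file data", path="/packages/source/f/foo/foo-1.0.tar.gz")
```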
| 2015-03-22T00:12:37Z | [] | [] |
|
pypi/warehouse | 500 | pypi__warehouse-500 | [
"415",
"494"
] | f1c39453ef59e539bd1e084682d1bd4a10b99f14 | diff --git a/warehouse/accounts/__init__.py b/warehouse/accounts/__init__.py
--- a/warehouse/accounts/__init__.py
+++ b/warehouse/accounts/__init__.py
@@ -19,6 +19,8 @@
BasicAuthAuthenticationPolicy, SessionAuthenticationPolicy,
)
+REDIRECT_FIELD_NAME = 'next'
+
def _login(username, password, request):
login_service = request.find_service(ILoginService, context=None)
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -14,12 +14,14 @@
from pyramid.security import remember, forget
from pyramid.view import view_config
+from warehouse.accounts import REDIRECT_FIELD_NAME
from warehouse.accounts.forms import LoginForm
from warehouse.accounts.interfaces import ILoginService
from warehouse.cache.origin import origin_cache
from warehouse.cache.http import cache_control
from warehouse.csrf import csrf_protect
from warehouse.sessions import uses_session
+from warehouse.utils.http import is_safe_url
@view_config(
@@ -48,19 +50,23 @@ def profile(user, request):
renderer="accounts/login.html",
decorator=[csrf_protect("accounts.login"), uses_session],
)
-def login(request, _form_class=LoginForm):
- # TODO: If already logged in just redirect to ?next=
+def login(request, redirect_field_name=REDIRECT_FIELD_NAME,
+ _form_class=LoginForm):
# TODO: Logging in should reset request.user
# TODO: Configure the login view as the default view for not having
# permission to view something.
login_service = request.find_service(ILoginService, context=None)
+ redirect_to = request.POST.get(redirect_field_name,
+ request.GET.get(redirect_field_name))
+
form = _form_class(request.POST, login_service=login_service)
if request.method == "POST" and form.validate():
# Get the user id for the given username.
- userid = login_service.find_userid(form.username.data)
+ username = form.username.data
+ userid = login_service.find_userid(username)
# We have a session factory associated with this request, so in order
# to protect against session fixation attacks we're going to make sure
@@ -96,13 +102,23 @@ def login(request, _form_class=LoginForm):
# and we don't want to continue using the old one.
request.session.new_csrf_token()
+ # If the user-originating redirection url is not safe, then redirect to
+ # the index instead.
+ if (not redirect_to or
+ not is_safe_url(url=redirect_to, host=request.host)):
+ redirect_to = "/"
+
# Now that we're logged in we'll want to redirect the user to either
# where they were trying to go originally, or to the default view.
- # TODO: Implement ?next= support.
- # TODO: Figure out a better way to handle the "default view".
- return HTTPSeeOther("/", headers=dict(headers))
+ return HTTPSeeOther(redirect_to, headers=dict(headers))
- return {"form": form}
+ return {
+ "form": form,
+ "redirect": {
+ "field": REDIRECT_FIELD_NAME,
+ "data": redirect_to,
+ },
+ }
@view_config(
@@ -110,10 +126,13 @@ def login(request, _form_class=LoginForm):
renderer="accounts/logout.html",
decorator=[csrf_protect("accounts.logout"), uses_session],
)
-def logout(request):
+def logout(request, redirect_field_name=REDIRECT_FIELD_NAME):
# TODO: If already logged out just redirect to ?next=
# TODO: Logging out should reset request.user
+ redirect_to = request.POST.get(redirect_field_name,
+ request.GET.get(redirect_field_name))
+
if request.method == "POST":
# A POST to the logout view tells us to logout. There's no form to
# validate here becuse there's no data. We should be protected against
@@ -133,10 +152,14 @@ def logout(request):
# to handle this for us.
request.session.invalidate()
+ # If the user-originating redirection url is not safe, then redirect to
+ # the index instead.
+ if (not redirect_to or
+ not is_safe_url(url=redirect_to, host=request.host)):
+ redirect_to = "/"
+
# Now that we're logged out we'll want to redirect the user to either
# where they were originally, or to the default view.
- # TODO: Implement ?next= support.
- # TODO: Figure out a better way to handle the "default view".
- return HTTPSeeOther("/", headers=dict(headers))
+ return HTTPSeeOther(redirect_to, headers=dict(headers))
- return {}
+ return {"redirect": {"field": REDIRECT_FIELD_NAME, "data": redirect_to}}
diff --git a/warehouse/utils/http.py b/warehouse/utils/http.py
new file mode 100644
--- /dev/null
+++ b/warehouse/utils/http.py
@@ -0,0 +1,49 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unicodedata
+from urllib.parse import urlparse
+
+
+# FROM https://github.com/django/django/blob/
+# 011a54315e46acdf288003566b8570440f5ac985/django/utils/http.py
+def is_safe_url(url, host=None):
+ """
+ Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
+ a different host and uses a safe scheme).
+ Always returns ``False`` on an empty url.
+ """
+ if url is not None:
+ url = url.strip()
+ if not url:
+ return False
+ # Chrome treats \ completely as /
+ url = url.replace('\\', '/')
+ # Chrome considers any URL with more than two slashes to be absolute, but
+ # urlparse is not so flexible. Treat any url with three slashes as unsafe.
+ if url.startswith('///'):
+ return False
+ url_info = urlparse(url)
+ # Forbid URLs like http:///example.com - with a scheme, but without a
+ # hostname.
+ # In that URL, example.com is not the hostname but, a path component.
+ # However, Chrome will still consider example.com to be the hostname,
+ # so we must not allow this syntax.
+ if not url_info.netloc and url_info.scheme:
+ return False
+ # Forbid URLs that start with control characters. Some browsers (like
+ # Chrome) ignore quite a few control characters at the start of a
+ # URL and might consider the URL as scheme relative.
+ if unicodedata.category(url[0])[0] == 'C':
+ return False
+ return ((not url_info.netloc or url_info.netloc == host) and
+ (not url_info.scheme or url_info.scheme in {'http', 'https'}))
| diff --git a/tests/accounts/test_views.py b/tests/accounts/test_views.py
--- a/tests/accounts/test_views.py
+++ b/tests/accounts/test_views.py
@@ -51,7 +51,8 @@ def test_returns_user(self, db_request):
class TestLogin:
- def test_get_returns_form(self, pyramid_request):
+ @pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
+ def test_get_returns_form(self, pyramid_request, next_url):
login_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: login_service
@@ -59,9 +60,15 @@ def test_get_returns_form(self, pyramid_request):
form_obj = pretend.stub()
form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+ if next_url is not None:
+ pyramid_request.GET["next"] = next_url
+
result = views.login(pyramid_request, _form_class=form_class)
- assert result == {"form": form_obj}
+ assert result == {
+ "form": form_obj,
+ "redirect": {"field": "next", "data": next_url},
+ }
assert pyramid_request.find_service.calls == [
pretend.call(ILoginService, context=None),
]
@@ -69,18 +76,24 @@ def test_get_returns_form(self, pyramid_request):
pretend.call(pyramid_request.POST, login_service=login_service),
]
- def test_post_invalid_returns_form(self, pyramid_request):
+ @pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
+ def test_post_invalid_returns_form(self, pyramid_request, next_url):
login_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: login_service
)
pyramid_request.method = "POST"
+ if next_url is not None:
+ pyramid_request.POST["next"] = next_url
form_obj = pretend.stub(validate=pretend.call_recorder(lambda: False))
form_class = pretend.call_recorder(lambda d, login_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
- assert result == {"form": form_obj}
+ assert result == {
+ "form": form_obj,
+ "redirect": {"field": "next", "data": next_url},
+ }
assert pyramid_request.find_service.calls == [
pretend.call(ILoginService, context=None),
]
@@ -127,29 +140,71 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
result = views.login(pyramid_request, _form_class=form_class)
assert isinstance(result, HTTPSeeOther)
+
assert result.headers["Location"] == "/"
assert result.headers["foo"] == "bar"
- assert pyramid_request.find_service.calls == [
- pretend.call(ILoginService, context=None),
- ]
+
assert form_class.calls == [
pretend.call(pyramid_request.POST, login_service=login_service),
]
assert form_obj.validate.calls == [pretend.call()]
+
assert login_service.find_userid.calls == [pretend.call("theuser")]
+
if with_user:
assert new_session == {}
else:
assert new_session == {"a": "b", "foo": "bar"}
- assert pyramid_request.session.invalidate.calls == [pretend.call()]
+
assert remember.calls == [pretend.call(pyramid_request, 1)]
+ assert pyramid_request.session.invalidate.calls == [pretend.call()]
+ assert pyramid_request.find_service.calls == [
+ pretend.call(ILoginService, context=None),
+ ]
assert pyramid_request.session.new_csrf_token.calls == [pretend.call()]
+ @pytest.mark.parametrize(
+ # The set of all possible next URLs. Since this set is infinite, we
+ # test only a finite set of reasonable URLs.
+ ("expected_next_url, observed_next_url"),
+ [
+ ("/security/", "/security/"),
+ ("http://example.com", "/"),
+ ],
+ )
+ def test_post_validate_no_redirects(self, pyramid_request,
+ expected_next_url, observed_next_url):
+ login_service = pretend.stub(
+ find_userid=pretend.call_recorder(lambda username: 1),
+ )
+ pyramid_request.find_service = pretend.call_recorder(
+ lambda iface, context: login_service
+ )
+ pyramid_request.method = "POST"
+ pyramid_request.POST["next"] = expected_next_url
+
+ form_obj = pretend.stub(
+ validate=pretend.call_recorder(lambda: True),
+ username=pretend.stub(data="theuser"),
+ )
+ form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+
+ result = views.login(pyramid_request, _form_class=form_class)
+
+ assert isinstance(result, HTTPSeeOther)
+
+ assert result.headers["Location"] == observed_next_url
+
class TestLogout:
- def test_get_returns_empty(self, pyramid_request):
- assert views.logout(pyramid_request) == {}
+ @pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
+ def test_get_returns_empty(self, pyramid_request, next_url):
+ if next_url is not None:
+ pyramid_request.GET["next"] = next_url
+
+ assert views.logout(pyramid_request) == \
+ {"redirect": {"field": "next", "data": next_url}}
def test_post_forgets_user(self, monkeypatch, pyramid_request):
forget = pretend.call_recorder(lambda request: [("foo", "bar")])
@@ -167,3 +222,23 @@ def test_post_forgets_user(self, monkeypatch, pyramid_request):
assert result.headers["foo"] == "bar"
assert forget.calls == [pretend.call(pyramid_request)]
assert pyramid_request.session.invalidate.calls == [pretend.call()]
+
+ @pytest.mark.parametrize(
+ # The set of all possible next URLs. Since this set is infinite, we
+ # test only a finite set of reasonable URLs.
+ ("expected_next_url, observed_next_url"),
+ [
+ ("/security/", "/security/"),
+ ("http://example.com", "/"),
+ ],
+ )
+ def test_post_redirects_user(self, pyramid_request, expected_next_url,
+ observed_next_url):
+ pyramid_request.method = "POST"
+
+ pyramid_request.POST["next"] = expected_next_url
+
+ result = views.logout(pyramid_request)
+
+ assert isinstance(result, HTTPSeeOther)
+ assert result.headers["Location"] == observed_next_url
diff --git a/tests/utils/test_http.py b/tests/utils/test_http.py
new file mode 100644
--- /dev/null
+++ b/tests/utils/test_http.py
@@ -0,0 +1,65 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from warehouse.utils.http import is_safe_url
+
+
+# (MOSTLY) FROM https://github.com/django/django/blob/
+# 011a54315e46acdf288003566b8570440f5ac985/tests/utils_tests/test_http.py
+class TestIsSafeUrl:
+
+ @pytest.mark.parametrize(
+ "url",
+ [
+ None,
+ 'http://example.com',
+ 'http:///example.com',
+ 'https://example.com',
+ 'ftp://exampel.com',
+ r'\\example.com',
+ r'\\\example.com',
+ r'/\\/example.com',
+ r'\\\example.com',
+ r'\\example.com',
+ r'\\//example.com',
+ r'/\/example.com',
+ r'\/example.com',
+ r'/\example.com',
+ 'http:///example.com',
+ 'http:/\//example.com',
+ 'http:\/example.com',
+ 'http:/\example.com',
+ 'javascript:alert("XSS")',
+ '\njavascript:alert(x)',
+ '\x08//example.com',
+ '\n',
+ ],
+ )
+ def test_rejects_bad_url(self, url):
+ assert not is_safe_url(url, host="testserver")
+
+ @pytest.mark.parametrize(
+ "url",
+ [
+ '/view/?param=http://example.com',
+ '/view/?param=https://example.com',
+ '/view?param=ftp://exampel.com',
+ 'view/?param=//example.com',
+ 'https://testserver/',
+ 'HTTPS://testserver/',
+ '//testserver/',
+ '/url%20with%20spaces/',
+ ],
+ )
+ def test_accepts_good_url(self, url):
+ assert is_safe_url(url, host="testserver")
| Handle next redirect support for login/logout
We need to handle this securely; take a look at how Django validates a redirect target.
Redirect after login or logout (fixes #415).
Again, probably not perfect, but good enough.
I copied some code from Django! Namely, for `warehouse.utils.http.is_safe_url`.
Someone else (probably @dstufft) needs to review legality and security, though.
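A minimal sketch of the validation flow (the helper name and wiring are illustrative; only `is_safe_url` itself is copied from Django, as in the patch above):

``` python
from warehouse.utils.http import is_safe_url

def safe_redirect_target(request, redirect_field_name="next"):
    # Prefer the POSTed value, falling back to the query string.
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name))
    # Only follow same-host http(s) URLs; anything else falls back to "/".
    if not redirect_to or not is_safe_url(url=redirect_to, host=request.host):
        redirect_to = "/"
    return redirect_to
```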
| I'm looking at this
| 2015-04-18T15:14:21Z | [] | [] |
pypi/warehouse | 505 | pypi__warehouse-505 | [
"492"
] | 2890d56de898a8161598ec106d5a183c2d18ee34 | diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -12,6 +12,8 @@
def includeme(config):
+ config.add_route("index", "/")
+
# Accounts
config.add_route(
"accounts.profile",
diff --git a/warehouse/views.py b/warehouse/views.py
new file mode 100644
--- /dev/null
+++ b/warehouse/views.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyramid.view import view_config
+
+from warehouse.packaging.models import Project, Release, File
+from warehouse.accounts.models import User
+
+
+@view_config(
+ route_name="index",
+ renderer="index.html",
+)
+def index(request):
+ latest_updated_releases = request.db.query(Release)\
+ .order_by(Release.created.desc())[:20]
+ num_projects = request.db.query(Project).count()
+ num_users = request.db.query(User).count()
+ num_files = request.db.query(File).count()
+ num_releases = request.db.query(Release).count()
+
+ return {
+ 'latest_updated_releases': latest_updated_releases,
+ 'num_projects': num_projects,
+ 'num_users': num_users,
+ 'num_releases': num_releases,
+ 'num_files': num_files,
+ }
| diff --git a/tests/legacy/api/test_simple.py b/tests/legacy/api/test_simple.py
--- a/tests/legacy/api/test_simple.py
+++ b/tests/legacy/api/test_simple.py
@@ -116,6 +116,8 @@ def test_with_files_no_serial(self, db_request):
)
for r in releases
]
+ # let's assert the result is ordered by string comparison of filename
+ files = sorted(files, key=lambda key: key.filename)
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
JournalEntryFactory.create(submitted_by=user.username)
@@ -139,6 +141,8 @@ def test_with_files_with_seiral(self, db_request):
)
for r in releases
]
+ # let's assert the result is ordered by string comparison of filename
+ files = sorted(files, key=lambda key: key.filename)
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
je = JournalEntryFactory.create(
diff --git a/tests/test_routes.py b/tests/test_routes.py
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -37,6 +37,7 @@ def add_redirect(*args, **kwargs):
includeme(config)
assert config.add_route.calls == [
+ pretend.call('index', '/'),
pretend.call(
"accounts.profile",
"/user/{username}/",
diff --git a/tests/test_views.py b/tests/test_views.py
new file mode 100644
--- /dev/null
+++ b/tests/test_views.py
@@ -0,0 +1,45 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+
+from warehouse.views import index
+
+from .common.db.packaging import (
+ ProjectFactory, ReleaseFactory, FileFactory,
+)
+from .common.db.accounts import UserFactory
+
+
+class TestIndex:
+
+ def test_index(self, db_request):
+
+ project = ProjectFactory.create()
+ release1 = ReleaseFactory.create(project=project)
+ release1.created = datetime.date(2011, 1, 1)
+ release2 = ReleaseFactory.create(project=project)
+ release2.created = datetime.date(2012, 1, 1)
+ FileFactory.create(
+ release=release1,
+ filename="{}-{}.tar.gz".format(project.name, release1.version),
+ python_version="source",
+ )
+ UserFactory.create()
+
+ assert index(db_request) == {
+ # assert that ordering is correct
+ 'latest_updated_releases': [release2, release1],
+ 'num_projects': 1,
+ 'num_users': 1,
+ 'num_releases': 2,
+ 'num_files': 1,
+ }
| Add basic index view
No tests yet, I'll add them once we agree this is useful :)
![2015-04-14-155911_2265x1366_scrot](https://cloud.githubusercontent.com/assets/126339/7146129/59a16724-e2bf-11e4-8658-36268ab14be7.png)
| 2015-04-20T03:28:06Z | [] | [] |
|
pypi/warehouse | 578 | pypi__warehouse-578 | [
"333"
] | 8e63b450a710bb3d62a7c1574655ff69f4956337 | diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -71,6 +71,18 @@ def json_release(release, request):
# We want to allow CORS here to enable anyone to fetch data from this API
request.response.headers["Access-Control-Allow-Origin"] = "*"
+ request.response.headers["Access-Control-Allow-Headers"] = ", ".join([
+ "Content-Type",
+ "If-Match",
+ "If-Modified-Since",
+ "If-None-Match",
+ "If-Unmodified-Since",
+ ])
+ request.response.headers["Access-Control-Allow-Methods"] = "GET"
+ request.response.headers["Access-Control-Max-Age"] = "86400"
+ request.response.headers["Access-Control-Expose-Headers"] = ", ".join([
+ "X-PyPI-Last-Serial",
+ ])
# Get the latest serial number for this project.
serial = (
| diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py
--- a/tests/unit/legacy/api/test_json.py
+++ b/tests/unit/legacy/api/test_json.py
@@ -141,6 +141,13 @@ def test_detail_renders(self, pyramid_config, db_request):
headers = db_request.response.headers
assert headers["Access-Control-Allow-Origin"] == "*"
+ assert headers["Access-Control-Allow-Headers"] == (
+ "Content-Type, If-Match, If-Modified-Since, If-None-Match, "
+ "If-Unmodified-Since"
+ )
+ assert headers["Access-Control-Allow-Methods"] == "GET"
+ assert headers["Access-Control-Max-Age"] == "86400"
+ assert headers["Access-Control-Expose-Headers"] == "X-PyPI-Last-Serial"
assert headers["X-PyPI-Last-Serial"] == je.id
assert result == {
| Support CORS on JSON API
Say I want to show project stats like monthly downloads on the project page. Fetching this from client-side JavaScript allows me to retain a completely static project page (which can be hosted on a CDN).
However, the JSON API does not set the `Access-Control-Allow-Origin` header, so there is no CORS support.
Test:
https://github.com/oberstet/scratchbox/blob/master/js/badges/pypi.html
For comparison, this is what GitHub does with its API:
https://github.com/oberstet/scratchbox/blob/master/js/badges/github.html
If the above worked, that would make #330 void (for me), provided the API doesn't get rate limited or anything like that.
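For reference, enabling CORS here amounts to setting response headers on the JSON view, mirroring what the patch above adds (the view body is elided in this sketch):

``` python
def json_release(release, request):
    # Allow any origin to GET this endpoint, and expose the serial header
    # so client-side JavaScript can read it.
    request.response.headers["Access-Control-Allow-Origin"] = "*"
    request.response.headers["Access-Control-Allow-Methods"] = "GET"
    request.response.headers["Access-Control-Expose-Headers"] = "X-PyPI-Last-Serial"
    ...
```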
| Still fails for me: "XMLHttpRequest cannot load https://pypi.python.org/PyPI/autobahn/json. Request header field Content-type is not allowed by Access-Control-Allow-Headers."
Hrmph. Ok
I'll add more headers to that then :)
Thanks!
The HTML here https://github.com/oberstet/scratchbox/blob/master/js/badges/pypi.html demonstrates the issue.
I took a look at your file, and if I edited out `req.setRequestHeader("Content-type", "application/json; charset=utf-8");` it worked fine. You're not sending a body along, so there's no real reason to set a `Content-Type` on the request, is there?
| 2015-08-04T20:42:37Z | [] | [] |
pypi/warehouse | 579 | pypi__warehouse-579 | [
"417",
"536"
] | ef209a71ea8370fd317ae9f436ffdfab263e949a | diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -23,7 +23,7 @@
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPGone
from pyramid.response import Response
-from pyramid.view import view_config
+from pyramid.view import forbidden_view_config, view_config
from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound
@@ -694,3 +694,11 @@ def submit(request):
HTTPGone,
"This API is no longer supported, instead simply upload the file.",
)
+
+
+@forbidden_view_config(request_param=":action")
+def forbidden_legacy(exc, request):
+ # We're not going to do anything amazing here, this just exists to override
+ # the default forbidden handler we have which does redirects to the login
+ # view, which we do not want on this API.
+ return exc
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -10,12 +10,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from pyramid.view import view_config
+from pyramid.httpexceptions import HTTPSeeOther
+from pyramid.view import forbidden_view_config, view_config
+from warehouse.accounts import REDIRECT_FIELD_NAME
from warehouse.packaging.models import Project, Release, File
from warehouse.accounts.models import User
+@forbidden_view_config()
+def forbidden(exc, request):
+ # If the forbidden error is because the user isn't logged in, then we'll
+ # redirect them to the log in page.
+ if request.authenticated_userid is None:
+ url = request.route_url(
+ "accounts.login",
+ _query={REDIRECT_FIELD_NAME: request.path_qs},
+ )
+ return HTTPSeeOther(url)
+
+ # If we've reached here, then the user is logged in and they are genuinely
+ # not allowed to access this page.
+ # TODO: Style the forbidden page.
+ return exc
+
+
@view_config(
route_name="index",
renderer="index.html",
| diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -1149,3 +1149,9 @@ def test_submit(pyramid_request):
assert resp.status_code == 410
assert resp.status == \
"410 This API is no longer supported, instead simply upload the file."
+
+
+def test_forbidden_legacy():
+ exc, request = pretend.stub(), pretend.stub()
+ resp = pypi.forbidden_legacy(exc, request)
+ assert resp is exc
diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -9,9 +9,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
import datetime
-from warehouse.views import index
+import pretend
+
+from warehouse.views import forbidden, index
from ..common.db.packaging import (
ProjectFactory, ReleaseFactory, FileFactory,
@@ -19,6 +22,30 @@
from ..common.db.accounts import UserFactory
+class TestForbiddenView:
+
+ def test_logged_in_returns_exception(self):
+ exc, request = pretend.stub(), pretend.stub(authenticated_userid=1)
+ resp = forbidden(exc, request)
+ assert resp is exc
+
+ def test_logged_out_redirects_login(self):
+ exc = pretend.stub()
+ request = pretend.stub(
+ authenticated_userid=None,
+ path_qs="/foo/bar/?b=s",
+ route_url=pretend.call_recorder(
+ lambda route, _query: "/accounts/login/?next=/foo/bar/%3Fb%3Ds"
+ ),
+ )
+
+ resp = forbidden(exc, request)
+
+ assert resp.status_code == 303
+ assert resp.headers["Location"] == \
+ "/accounts/login/?next=/foo/bar/%3Fb%3Ds"
+
+
class TestIndex:
def test_index(self, db_request):
| Redirect users to the login view when they don't have permission
We need to configure Pyramid so that it redirects users to the login view whenever they don't have permission to do something (perhaps depending on whether they are already logged in).
Set login view as forbidden_view_config
Fixes #417
| I'll pick this one up.
@dstufft – do we have a list of places that need redirecting?
@ghickman no, but Pyramid has a built-in mechanism for this where you can tell it to do something like this for the forbidden view. I think it's a view decorated like:
`@view_config(context=HTTPForbidden)`
which will cause that view to be used any time an `HTTPForbidden` exception is raised.
I've run out of time to get a patch out for this unfortunately. There's a working view in my [branch](https://github.com/ghickman/warehouse/tree/redirect-unauthed-users) but it's untested. If anyone else wants to pick up this issue then please feel free.
I don't think we should just call the login view; I think we should have a separate view that does nothing but redirect to the login view. However, we also need a predicate on this so that it doesn't happen for anything registered with `warehouse.legacy.action_routing.add_pypi_action_route`. So we probably just need a custom predicate that looks something like:
``` python
def not_legacy_pypi_action(info, request):
    return not request.path.startswith("/pypi") and ":action" not in request.params
```
A better idea is to do something like:
``` python
from pyramid.httpexceptions import HTTPForbidden, HTTPSeeOther
from pyramid.view import forbidden_view_config

@forbidden_view_config()
def redirect_to_login(request):
    # Send the user off to the login view.
    return HTTPSeeOther(request.route_url("accounts.login"))

@forbidden_view_config(request_param=":action")
def do403(request):
    # The legacy :action API gets a plain 403 instead of a redirect.
    return HTTPForbidden()
```
Cool! I will update my pull request then.
| 2015-08-04T21:53:55Z | [] | [] |
pypi/warehouse | 584 | pypi__warehouse-584 | [
"573"
] | 1bcde5b597e2e6bda382248febac6915e37fc740 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -88,6 +88,7 @@
"pyramid_tm>=0.12",
"readme>=0.5.1",
"redis",
+ "rfc3986",
"setproctitle",
"setuptools",
"sqlalchemy>=0.9",
diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -20,6 +20,7 @@
import pkg_resources
import wtforms
import wtforms.validators
+from rfc3986 import uri_reference
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPGone
from pyramid.response import Response
@@ -194,7 +195,10 @@ def _validate_project_url(value):
if not url:
raise wtforms.validators.ValidationError("Must have an URL.")
- # TODO: Actually validate that the URL is a valid URL.
+ url = uri_reference(url)
+ url = url.normalize()
+ if not (url.is_valid() and url.scheme in ('http', 'https')):
+ raise wtforms.validators.ValidationError("Invalid URL.")
def _validate_project_url_list(form, field):
| diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -230,6 +230,8 @@ def test_validate_project_url_valid(self, project_url):
", https://pypi.python.org/",
"Home, ",
("A" * 33) + ", https://example.com/",
+ "Home, I am a banana",
+ "Home, ssh://foobar",
],
)
def test_validate_project_url_invalid(self, project_url):
| Validate that Project URLs are actually URLs on Upload
As the title says, validate the project url metadata on upload, probably with [rfc3986](https://pypi.python.org/pypi/rfc3986).
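A minimal sketch of such a check (the helper name is made up for illustration; the calls mirror the validation in the patch above):

``` python
from rfc3986 import uri_reference

def is_valid_project_url(value):
    # Normalize first so scheme/host casing doesn't matter, then require a
    # well-formed http(s) URL.
    url = uri_reference(value).normalize()
    return url.is_valid() and url.scheme in ("http", "https")
```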
| Grabbing this. PR incoming.
| 2015-08-05T04:32:20Z | [] | [] |
pypi/warehouse | 597 | pypi__warehouse-597 | [
"327"
] | dcd89a3439d9c05e3dffa44f748c5f4b1e479a93 | diff --git a/warehouse/__about__.py b/warehouse/__about__.py
--- a/warehouse/__about__.py
+++ b/warehouse/__about__.py
@@ -10,17 +10,32 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import os.path
+
__all__ = [
- "__title__", "__summary__", "__uri__", "__version__", "__author__",
- "__email__", "__license__", "__copyright__",
+ "__title__", "__summary__", "__uri__", "__version__", "__commit__",
+ "__author__", "__email__", "__license__", "__copyright__",
]
+
+try:
+ base_dir = os.path.dirname(os.path.abspath(__file__))
+except NameError:
+ base_dir = None
+
+
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://warehouse.python.org/"
__version__ = "15.0.dev0"
+if base_dir is not None and os.path.exists(os.path.join(base_dir, ".commit")):
+ with open(os.path.join(base_dir, ".commit")) as fp:
+ __commit__ = fp.read().strip()
+else:
+ __commit__ = None
+
__author__ = "The Python Packaging Authority"
__email__ = "donald@stufft.io"
diff --git a/warehouse/__init__.py b/warehouse/__init__.py
--- a/warehouse/__init__.py
+++ b/warehouse/__init__.py
@@ -11,12 +11,12 @@
# limitations under the License.
from warehouse.__about__ import (
- __author__, __copyright__, __email__, __license__, __summary__, __title__,
- __uri__, __version__,
+ __author__, __commit__, __copyright__, __email__, __license__, __summary__,
+ __title__, __uri__, __version__,
)
__all__ = [
- "__author__", "__copyright__", "__email__", "__license__", "__summary__",
- "__title__", "__uri__", "__version__",
+ "__author__", "__commit__", "__copyright__", "__email__", "__license__",
+ "__summary__", "__title__", "__uri__", "__version__",
]
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -22,6 +22,7 @@
from pyramid.httpexceptions import HTTPMovedPermanently
from pyramid.response import Response
+from warehouse import __commit__
from warehouse.utils.static import WarehouseCacheBuster
@@ -101,6 +102,9 @@ def configure(settings=None):
if settings is None:
settings = {}
+ # Add information about the current copy of the code.
+ settings.setdefault("warehouse.commit", __commit__)
+
# Set the environment from an environment variable, if one hasn't already
# been set.
maybe_set(
| diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -264,6 +264,7 @@ def __init__(self):
expected_settings = {
"warehouse.env": environment,
+ "warehouse.commit": None,
"site.name": "Warehouse",
}
| Link to GitHub from the homepage
It'd be nice to have a link to the GitHub repo in the header/footer. It makes it easier for users to file bugs, and avoids users getting confused between this repo and https://github.com/crateio/warehouse .
| 2015-08-08T21:26:02Z | [] | [] |
|
pypi/warehouse | 598 | pypi__warehouse-598 | [
"256"
] | 7c4189af14d4a0025333056f0a2e919add71a852 | diff --git a/warehouse/cli/serve.py b/warehouse/cli/serve.py
--- a/warehouse/cli/serve.py
+++ b/warehouse/cli/serve.py
@@ -62,7 +62,6 @@ def serve(config, bind, reload_, workers):
options = {
# The gunicorn docs recommend (2 x $num_cores) + 1
"workers": (2 * multiprocessing.cpu_count()) + 1,
- "secure_scheme_headers": {"WAREHOUSE-PROTO": "https"},
"forwarded_allow_ips": "*",
}
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -18,11 +18,12 @@
import transaction
from pyramid import renderers
-from pyramid.config import Configurator
+from pyramid.config import Configurator as _Configurator
from pyramid.httpexceptions import HTTPMovedPermanently
from pyramid.response import Response
from warehouse import __commit__
+from warehouse.utils.proxy import ProxyFixer
from warehouse.utils.static import WarehouseCacheBuster
@@ -31,6 +32,24 @@ class Environment(enum.Enum):
development = "development"
+class Configurator(_Configurator):
+
+ def add_wsgi_middleware(self, middleware, *args, **kwargs):
+ middlewares = self.get_settings().setdefault("wsgi.middlewares", [])
+ middlewares.append((middleware, args, kwargs))
+
+ def make_wsgi_app(self, *args, **kwargs):
+ # Get the WSGI application from the underlying configurator
+ app = super().make_wsgi_app(*args, **kwargs)
+
+ # Look to see if we have any WSGI middlewares configured.
+ for middleware, args, kw in self.get_settings()["wsgi.middlewares"]:
+ app = middleware(app, *args, **kw)
+
+ # Finally, return our now wrapped app
+ return app
+
+
def content_security_policy_tween_factory(handler, registry):
policy = registry.settings.get("csp", {})
policy = "; ".join([" ".join([k] + v) for k, v in sorted(policy.items())])
@@ -113,6 +132,7 @@ def configure(settings=None):
)
# Pull in default configuration from the environment.
+ maybe_set(settings, "warehouse.token", "WAREHOUSE_TOKEN")
maybe_set(settings, "site.name", "SITE_NAME", default="Warehouse")
maybe_set(settings, "aws.key_id", "AWS_ACCESS_KEY_ID")
maybe_set(settings, "aws.secret_key", "AWS_SECRET_ACCESS_KEY")
@@ -287,6 +307,13 @@ def configure(settings=None):
),
)
+ # Enable support of passing certain values like remote host, client
+ # address, and protocol support in from an outer proxy to the application.
+ config.add_wsgi_middleware(
+ ProxyFixer,
+ token=config.registry.settings["warehouse.token"],
+ )
+
# Scan everything for configuration
config.scan(ignore=["warehouse.migrations.env"])
diff --git a/warehouse/utils/proxy.py b/warehouse/utils/proxy.py
new file mode 100644
--- /dev/null
+++ b/warehouse/utils/proxy.py
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hmac
+
+
+class ProxyFixer:
+
+ def __init__(self, app, token):
+ self.app = app
+ self.token = token
+
+ def __call__(self, environ, start_response):
+ # Determine if the request comes from a trusted proxy or not by looking
+ # for a token in the request.
+ request_token = environ.get("HTTP_WAREHOUSE_TOKEN")
+ if (request_token is not None
+ and hmac.compare_digest(self.token, request_token)):
+ # Compute our values from the environment.
+ proto = environ.get("HTTP_WAREHOUSE_PROTO", "")
+ remote_addr = environ.get("HTTP_WAREHOUSE_IP", "")
+ host = environ.get("HTTP_WAREHOUSE_HOST", "")
+
+ # Put the new header values into our environment.
+ if remote_addr:
+ environ["REMOTE_ADDR"] = remote_addr
+ if host:
+ environ["HTTP_HOST"] = host
+ if proto:
+ environ["wsgi.url_scheme"] = proto
+
+ # Remove any of the forwarded or warehouse headers from the environment
+ for header in {
+ "HTTP_X_FORWARDED_PROTO", "HTTP_X_FORWARDED_FOR",
+ "HTTP_X_FORWARDED_HOST", "HTTP_WAREHOUSE_TOKEN",
+ "HTTP_WAREHOUSE_PROTO", "HTTP_WAREHOUSE_IP",
+ "HTTP_WAREHOUSE_HOST"}:
+ if header in environ:
+ del environ[header]
+
+ # Dispatch to the real underlying application.
+ return self.app(environ, start_response)
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -92,6 +92,7 @@ def drop_database():
def app_config(database):
config = configure(
settings={
+ "warehouse.token": "insecure token",
"camo.url": "http://localhost:9000/",
"camo.key": "insecure key",
"database.url": database,
diff --git a/tests/unit/cli/test_serve.py b/tests/unit/cli/test_serve.py
--- a/tests/unit/cli/test_serve.py
+++ b/tests/unit/cli/test_serve.py
@@ -69,7 +69,6 @@ def test_serve(monkeypatch, cli):
"reload": False,
"workers": 3,
"proc_name": "warehouse",
- "secure_scheme_headers": {"WAREHOUSE-PROTO": "https"},
"forwarded_allow_ips": "*",
},
),
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -20,6 +20,7 @@
from pyramid.httpexceptions import HTTPMovedPermanently
from warehouse import config
+from warehouse.utils.proxy import ProxyFixer
class TestCSPTween:
@@ -223,6 +224,7 @@ def test_configure(monkeypatch, settings, environment):
class FakeRegistry(dict):
def __init__(self):
self.settings = {
+ "warehouse.token": "insecure token",
"warehouse.env": environment,
"camo.url": "http://camo.example.com/",
"pyramid.reload_assets": False,
@@ -233,6 +235,7 @@ def __init__(self):
configurator_obj = pretend.stub(
registry=FakeRegistry(),
include=pretend.call_recorder(lambda include: None),
+ add_wsgi_middleware=pretend.call_recorder(lambda m, *a, **kw: None),
add_renderer=pretend.call_recorder(lambda name, renderer: None),
add_jinja2_renderer=pretend.call_recorder(lambda renderer: None),
add_jinja2_search_path=pretend.call_recorder(lambda path, name: None),
@@ -303,6 +306,9 @@ def __init__(self):
assert configurator_cls.calls == [pretend.call(settings=expected_settings)]
assert result is configurator_obj
+ assert configurator_obj.add_wsgi_middleware.calls == [
+ pretend.call(ProxyFixer, token="insecure token"),
+ ]
assert configurator_obj.include.calls == (
[
pretend.call(x) for x in [
diff --git a/tests/unit/utils/test_proxy.py b/tests/unit/utils/test_proxy.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/utils/test_proxy.py
@@ -0,0 +1,92 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse.utils import proxy
+
+
+class TestProxyFixer:
+
+ def test_cleans_environ_forwarded(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+
+ environ = {
+ "HTTP_X_FORWARDED_PROTO": "http",
+ "HTTP_X_FORWARDED_FOR": "1.2.3.4",
+ "HTTP_X_FORWARDED_HOST": "example.com",
+ "HTTP_SOME_OTHER_HEADER": "woop",
+ }
+ start_response = pretend.stub()
+
+ resp = proxy.ProxyFixer(app, token=None)(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [
+ pretend.call({"HTTP_SOME_OTHER_HEADER": "woop"}, start_response),
+ ]
+
+ def test_skips_headers(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+
+ environ = {
+ "HTTP_WAREHOUSE_TOKEN": "NOPE",
+ "HTTP_WAREHOUSE_PROTO": "http",
+ "HTTP_WAREHOUSE_IP": "1.2.3.4",
+ "HTTP_WAREHOUSE_HOST": "example.com",
+ }
+ start_response = pretend.stub()
+
+ resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [pretend.call({}, start_response)]
+
+ def test_accepts_headers(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+
+ environ = {
+ "HTTP_WAREHOUSE_TOKEN": "1234",
+ "HTTP_WAREHOUSE_PROTO": "http",
+ "HTTP_WAREHOUSE_IP": "1.2.3.4",
+ "HTTP_WAREHOUSE_HOST": "example.com",
+ }
+ start_response = pretend.stub()
+
+ resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [
+ pretend.call(
+ {
+ "REMOTE_ADDR": "1.2.3.4",
+ "HTTP_HOST": "example.com",
+ "wsgi.url_scheme": "http",
+ },
+ start_response,
+ ),
+ ]
+
+ def test_missing_headers(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+
+ environ = {"HTTP_WAREHOUSE_TOKEN": "1234"}
+ start_response = pretend.stub()
+
+ resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [pretend.call({}, start_response)]
| Only trust X-Forwarded-* headers when they come from Fastly
Split out from #248 because it's not exactly related to that.
We need a way to ensure that the `X-Forwarded-*` headers are only trusted when they come from Fastly. Additionally, we need to ensure that Fastly is actually setting them and stripping them from incoming requests.
| On #248 @ewdurbin said:
> short convo with @dstufft on IRC lead to two options on trusting specific headers.
> - maintain a whitelist of upstream Fastly cache nodes
> - generate HTTPS Client Certs for Fastly nodes
>
> HTTPS Client Certs leave us less need for manually or regularly updating an IP white list. (At this time Fastly distributes this information via mailing list updates)
>
> Generating these certs could occur on a per backend basis, one cert for Fastly and multiple for developer and infrastructure use.
>
> Responsibility for stripping unauthorized headers could fall on either nginx or the warehouse application.
>
> nginx can be used to verify the client certificate and either set an nginx boolean or a header (with a header we would need to strip it beforehand).
@mattrobenolt said on irc:
```
[00:59:13] <mattrobenolt> dstufft: We do the Fastly <-> Disqus verification with a secret header.
[00:59:48] <mattrobenolt> So we set a Fastly header when it sends the request to the backend. Just some random key. Then our backends reject anything that doesn't have that key.
```
Specifically, in vcl we do:
```
sub vcl_recv {
    set req.http.X-Forwarded-For = req.http.Fastly-Client-IP;
    set req.http.X-Something-Something = "foo";
}
```
Then in haproxy:
```
acl via_fastly hdr(X-Something-Something) foo
block unless via_fastly
```
Using a secret header seems to be the easiest method to me.
secret header sounds straightforward and easy to manage.
@dstufft where in the stack would you like header validation to exist and how should non-cdn traffic be handled?
@ewdurbin If we can make Nginx strip X-Forwarded-\* headers unless that secret header exists, that would be easiest. If that's hard (or bad) to do, I can do it in a WSGI middleware too. It doesn't matter to me. I don't mind if non-CDN traffic can still hit the backend directly (it makes it easy for us to test things); we should just strip the headers when the request isn't from the CDN.
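A rough sketch of that WSGI-middleware option, essentially what the `ProxyFixer` in the patch above ends up doing (the class name and header names here are illustrative, and the secret would come from configuration):

``` python
import hmac

class ForwardedHeaderFixer:
    def __init__(self, app, token):
        self.app = app
        self.token = token

    def __call__(self, environ, start_response):
        supplied = environ.get("HTTP_WAREHOUSE_TOKEN", "")
        # Only trust the forwarded headers when the shared secret matches.
        if not hmac.compare_digest(self.token, supplied):
            for header in ("HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_PROTO",
                           "HTTP_X_FORWARDED_HOST"):
                environ.pop(header, None)
        return self.app(environ, start_response)
```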
Can you use this to solve the problem? http://wiki.nginx.org/HttpRealipModule We do this to keep the IP address from the first hop outside the datacenter, using `set_real_ip_from`. I bet you can mix and match to make magic.
In other words, it's easy for me to do it in Warehouse, but I have a slight preference to not do it in Warehouse if it's not hard.
Actually, if you just set `set_real_ip_from` to Fastly's IPs as well as your internal subnet, everything should be fine, unless it's possible to spoof the connecting IP address to nginx, and afaik, that's not possible.
in the interest of not maintaining a whitelist for `set_real_ip_from` i'd push for warehouse to have a configurable secret header.
ultimately this pushes the responsibility for verifying the header and stripping the appropriate headers to middleware in the warehouse project.
This regressed when we switched to Pyramid.
| 2015-08-10T01:06:52Z | [] | [] |
pypi/warehouse | 599 | pypi__warehouse-599 | [
"397"
] | 54a486ec994cf72cf1ffc76ad3857249af08ef3a | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -23,8 +23,8 @@
from pyramid.response import Response
from warehouse import __commit__
-from warehouse.utils.proxy import ProxyFixer
from warehouse.utils.static import WarehouseCacheBuster
+from warehouse.utils.wsgi import ProxyFixer, VhmRootRemover
class Environment(enum.Enum):
@@ -314,6 +314,9 @@ def configure(settings=None):
token=config.registry.settings["warehouse.token"],
)
+ # Protect against cache poisoning via the X-Vhm-Root headers.
+ config.add_wsgi_middleware(VhmRootRemover)
+
# Scan everything for configuration
config.scan(ignore=["warehouse.migrations.env"])
diff --git a/warehouse/utils/proxy.py b/warehouse/utils/wsgi.py
similarity index 86%
rename from warehouse/utils/proxy.py
rename to warehouse/utils/wsgi.py
--- a/warehouse/utils/proxy.py
+++ b/warehouse/utils/wsgi.py
@@ -49,3 +49,16 @@ def __call__(self, environ, start_response):
# Dispatch to the real underlying application.
return self.app(environ, start_response)
+
+
+class VhmRootRemover:
+
+ def __init__(self, app):
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ # Delete the X-Vhm-Root header if it exists.
+ if "HTTP_X_VHM_ROOT" in environ:
+ del environ["HTTP_X_VHM_ROOT"]
+
+ return self.app(environ, start_response)
| diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -20,7 +20,7 @@
from pyramid.httpexceptions import HTTPMovedPermanently
from warehouse import config
-from warehouse.utils.proxy import ProxyFixer
+from warehouse.utils.wsgi import ProxyFixer, VhmRootRemover
class TestCSPTween:
@@ -308,6 +308,7 @@ def __init__(self):
assert result is configurator_obj
assert configurator_obj.add_wsgi_middleware.calls == [
pretend.call(ProxyFixer, token="insecure token"),
+ pretend.call(VhmRootRemover),
]
assert configurator_obj.include.calls == (
[
diff --git a/tests/unit/utils/test_proxy.py b/tests/unit/utils/test_wsgi.py
similarity index 69%
rename from tests/unit/utils/test_proxy.py
rename to tests/unit/utils/test_wsgi.py
--- a/tests/unit/utils/test_proxy.py
+++ b/tests/unit/utils/test_wsgi.py
@@ -12,7 +12,7 @@
import pretend
-from warehouse.utils import proxy
+from warehouse.utils import wsgi
class TestProxyFixer:
@@ -29,7 +29,7 @@ def test_cleans_environ_forwarded(self):
}
start_response = pretend.stub()
- resp = proxy.ProxyFixer(app, token=None)(environ, start_response)
+ resp = wsgi.ProxyFixer(app, token=None)(environ, start_response)
assert resp is response
assert app.calls == [
@@ -48,7 +48,7 @@ def test_skips_headers(self):
}
start_response = pretend.stub()
- resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+ resp = wsgi.ProxyFixer(app, token="1234")(environ, start_response)
assert resp is response
assert app.calls == [pretend.call({}, start_response)]
@@ -65,7 +65,7 @@ def test_accepts_headers(self):
}
start_response = pretend.stub()
- resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+ resp = wsgi.ProxyFixer(app, token="1234")(environ, start_response)
assert resp is response
assert app.calls == [
@@ -86,7 +86,34 @@ def test_missing_headers(self):
environ = {"HTTP_WAREHOUSE_TOKEN": "1234"}
start_response = pretend.stub()
- resp = proxy.ProxyFixer(app, token="1234")(environ, start_response)
+ resp = wsgi.ProxyFixer(app, token="1234")(environ, start_response)
assert resp is response
assert app.calls == [pretend.call({}, start_response)]
+
+
+class TestVhmRootRemover:
+
+ def test_removes_header(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+ environ = {"HTTP_X_VHM_ROOT": "/foo/bar"}
+ start_response = pretend.stub()
+
+ resp = wsgi.VhmRootRemover(app)(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [pretend.call({}, start_response)]
+
+ def test_passes_through_headers(self):
+ response = pretend.stub()
+ app = pretend.call_recorder(lambda e, s: response)
+ environ = {"HTTP_X_FOOBAR": "wat"}
+ start_response = pretend.stub()
+
+ resp = wsgi.VhmRootRemover(app)(environ, start_response)
+
+ assert resp is response
+ assert app.calls == [
+ pretend.call({"HTTP_X_FOOBAR": "wat"}, start_response),
+ ]
| Protect against X-Vhm-Root headers
Protect against X-Vhm-Root headers (See https://github.com/Pylons/pyramid/issues/1591).
| 2015-08-10T02:14:05Z | [] | [] |
|
pypi/warehouse | 607 | pypi__warehouse-607 | [
"394"
] | 70fc678d88678399d1b221453973426828c7988f | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -90,6 +90,7 @@
"pyramid_services",
"pyramid_tm>=0.12",
"readme>=0.5.1",
+ "requests",
"redis>=2.8.0",
"rfc3986",
"setproctitle",
diff --git a/tasks/pip.py b/tasks/pip.py
--- a/tasks/pip.py
+++ b/tasks/pip.py
@@ -31,8 +31,6 @@
# $ invoke pip.compile
#
--r requirements-deploy.txt
-
""".lstrip()
diff --git a/warehouse/cache/origin/__init__.py b/warehouse/cache/origin/__init__.py
--- a/warehouse/cache/origin/__init__.py
+++ b/warehouse/cache/origin/__init__.py
@@ -10,13 +10,50 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import collections
import functools
+from warehouse import db
from warehouse.cache.origin.interfaces import IOriginCache
-def origin_cache(view_or_seconds):
- def inner(view, seconds=None):
+@db.listens_for(db.Session, "after_flush")
+def store_purge_keys(config, session, flush_context):
+ cache_keys = config.registry["cache_keys"]
+
+ # We'll (ab)use the session.info dictionary to store a list of pending
+ # purges to the session.
+ purges = session.info.setdefault("warehouse.cache.origin.purges", set())
+
+ # Go through each new, changed, and deleted object and attempt to store
+ # a cache key that we'll want to purge when the session has been committed.
+ for obj in (session.new | session.dirty | session.deleted):
+ try:
+ key_maker = cache_keys[obj.__class__]
+ except KeyError:
+ continue
+
+ purges.update(key_maker(obj).purge)
+
+
+@db.listens_for(db.Session, "after_commit")
+def execute_purge(config, session):
+ purges = session.info.pop("warehouse.cache.origin.purges", set())
+
+ try:
+ cacher_factory = config.find_service_factory(IOriginCache)
+ except ValueError:
+ return
+
+ cacher = cacher_factory(None, config)
+ cacher.purge(purges)
+
+
+def origin_cache(seconds, keys=None):
+ if keys is None:
+ keys = []
+
+ def inner(view):
@functools.wraps(view)
def wrapped(context, request):
cache_keys = request.registry["cache_keys"]
@@ -30,7 +67,7 @@ def wrapped(context, request):
request.add_response_callback(
functools.partial(
cacher.cache,
- sorted(key_maker(context)),
+ sorted(key_maker(context).cache + keys),
seconds=seconds,
)
)
@@ -38,24 +75,47 @@ def wrapped(context, request):
return view(context, request)
return wrapped
- if callable(view_or_seconds):
- return inner(view_or_seconds)
- else:
- return functools.partial(inner, seconds=view_or_seconds)
+ return inner
+
+
+CacheKeys = collections.namedtuple("CacheKeys", ["cache", "purge"])
+
+def key_maker_factory(cache_keys, purge_keys):
+ if cache_keys is None:
+ cache_keys = []
+
+ if purge_keys is None:
+ purge_keys = []
-def key_maker_factory(keys):
def key_maker(obj):
- return [k.format(obj=obj) for k in keys]
+ return CacheKeys(
+ cache=[k.format(obj=obj) for k in cache_keys],
+ purge=[k.format(obj=obj) for k in purge_keys],
+ )
+
return key_maker
-def register_origin_cache_keys(config, klass, *keys):
- cache_keys = config.registry.setdefault("cache_keys", {})
- cache_keys[klass] = key_maker_factory(keys)
+def register_origin_cache_keys(config, klass, cache_keys=None,
+ purge_keys=None):
+ key_makers = config.registry.setdefault("cache_keys", {})
+ key_makers[klass] = key_maker_factory(
+ cache_keys=cache_keys,
+ purge_keys=purge_keys,
+ )
def includeme(config):
+ if "origin_cache.backend" in config.registry.settings:
+ cache_class = config.maybe_dotted(
+ config.registry.settings["origin_cache.backend"],
+ )
+ config.register_service_factory(
+ cache_class.create_service,
+ IOriginCache,
+ )
+
config.add_directive(
"register_origin_cache_keys",
register_origin_cache_keys,
diff --git a/warehouse/cache/origin/fastly.py b/warehouse/cache/origin/fastly.py
--- a/warehouse/cache/origin/fastly.py
+++ b/warehouse/cache/origin/fastly.py
@@ -10,14 +10,46 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import urllib.parse
+
+import requests
+
from zope.interface import implementer
+from warehouse import celery
from warehouse.cache.origin.interfaces import IOriginCache
+class UnsuccessfulPurge(Exception):
+ pass
+
+
+@celery.task(bind=True, ignore_result=True, acks_late=True)
+def purge_key(task, request, key):
+ cacher = request.find_service(IOriginCache)
+ try:
+ cacher.purge_key(key)
+ except (requests.ConnectionError, requests.HTTPError, requests.Timeout,
+ UnsuccessfulPurge) as exc:
+ raise task.retry(exc=exc)
+
+
@implementer(IOriginCache)
class FastlyCache:
+ _api_domain = "https://api.fastly.com"
+
+ def __init__(self, *, api_key, service_id):
+ self.api_key = api_key
+ self.service_id = service_id
+
+ @classmethod
+ def create_service(cls, context, request):
+ return cls(
+ api_key=request.registry.settings["origin_cache.api_key"],
+ service_id=request.registry.settings["origin_cache.service_id"],
+ )
+
def cache(self, keys, request, response, *, seconds=None):
response.headers["Surrogate-Key"] = " ".join(keys)
@@ -26,12 +58,25 @@ def cache(self, keys, request, response, *, seconds=None):
"max-age={}".format(seconds)
def purge(self, keys):
- raise NotImplementedError # TODO: Implement Purging
+ for key in keys:
+ purge_key.delay(key)
+ def purge_key(self, key):
+ path = "/service/{service_id}/purge/{key}".format(
+ service_id=self.service_id,
+ key=key,
+ )
+ url = urllib.parse.urljoin(self._api_domain, path)
+ headers = {
+ "Accept": "application/json",
+ "Fastly-Key": self.api_key,
+ "Fastly-Soft-Purge": "1",
+ }
-def includeme(config):
- # Ensure that pyramid_services has been registered.
- config.include("pyramid_services")
+ resp = requests.post(url, headers=headers)
+ resp.raise_for_status()
- # Register an IOriginCache which will handle interfacing with Fastly.
- config.register_service(FastlyCache(), IOriginCache)
+ if resp.json().get("status") != "ok":
+ raise UnsuccessfulPurge(
+ "Could not successfully purge {!r}".format(key)
+ )
diff --git a/warehouse/cache/origin/interfaces.py b/warehouse/cache/origin/interfaces.py
--- a/warehouse/cache/origin/interfaces.py
+++ b/warehouse/cache/origin/interfaces.py
@@ -15,6 +15,12 @@
class IOriginCache(Interface):
+ def create_service(context, request):
+ """
+ Create the service, given the context and request for which it is being
+ created for.
+ """
+
def cache(keys, request, response, *, seconds=None):
"""
A hook that will be called after the request has been processed, used
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -15,7 +15,9 @@
import shlex
import fs.opener
+import pyramid_services
import transaction
+import zope.interface
from pyramid import renderers
from pyramid.config import Configurator as _Configurator
@@ -117,6 +119,25 @@ def maybe_set_compound(settings, base, name, envvar):
settings[".".join([base, key])] = value
+# Once mmerickel/pyramid_services#1 has a solution we can remove this code and
+# switch to using that instead of doing this ourself. This was taken from the
+# PR in mmerickel/pyramid_services#4.
+def find_service_factory(
+ config_or_request,
+ iface=zope.interface.Interface,
+ context=None,
+ name="",
+):
+ context_iface = zope.interface.providedBy(context)
+ svc_types = (pyramid_services.IServiceClassifier, context_iface)
+
+ adapters = config_or_request.registry.adapters
+ svc_factory = adapters.lookup(svc_types, iface, name=name)
+ if svc_factory is None:
+ raise ValueError("could not find registered service")
+ return svc_factory
+
+
def configure(settings=None):
if settings is None:
settings = {}
@@ -148,6 +169,7 @@ def configure(settings=None):
maybe_set(settings, "docs.url", "DOCS_URL")
maybe_set(settings, "dirs.documentation", "DOCS_DIR")
maybe_set_compound(settings, "files", "backend", "FILES_BACKEND")
+ maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE")
# Add the settings we use when the environment is set to development.
if settings["warehouse.env"] == Environment.development:
@@ -231,6 +253,10 @@ def configure(settings=None):
# Register support for services
config.include("pyramid_services")
+ # Register our find_service_factory methods
+ config.add_request_method(find_service_factory)
+ config.add_directive("find_service_factory", find_service_factory)
+
# Register support for XMLRPC and override it's renderer to allow
# specifying custom dumps arguments.
config.include("pyramid_rpc.xmlrpc")
diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py
--- a/warehouse/packaging/__init__.py
+++ b/warehouse/packaging/__init__.py
@@ -35,11 +35,11 @@ def includeme(config):
# Register our origin cache keys
config.register_origin_cache_keys(
Project,
- "project",
- "project/{obj.normalized_name}",
+ cache_keys=["project/{obj.normalized_name}"],
+ purge_keys=["project/{obj.normalized_name}", "all-projects"],
)
config.register_origin_cache_keys(
Release,
- "project",
- "project/{obj.project.normalized_name}",
+ cache_keys=["project/{obj.project.normalized_name}"],
+ purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
)
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -18,6 +18,7 @@
)
from warehouse.accounts import REDIRECT_FIELD_NAME
+from warehouse.cache.origin import origin_cache
from warehouse.csrf import csrf_exempt
from warehouse.packaging.models import Project, Release, File
from warehouse.accounts.models import User
@@ -52,6 +53,9 @@ def forbidden(exc, request):
@view_config(
route_name="index",
renderer="index.html",
+ decorator=[
+ origin_cache(1 * 60 * 60, keys=["all-projects"]), # 1 Hour.
+ ]
)
def index(request):
latest_updated_releases = request.db.query(Release)\
| diff --git a/tests/unit/cache/origin/test_fastly.py b/tests/unit/cache/origin/test_fastly.py
--- a/tests/unit/cache/origin/test_fastly.py
+++ b/tests/unit/cache/origin/test_fastly.py
@@ -10,8 +10,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import celery.exceptions
import pretend
import pytest
+import requests
from zope.interface.verify import verifyClass
@@ -19,16 +21,82 @@
from warehouse.cache.origin.interfaces import IOriginCache
+class TestPurgeKey:
+
+ def test_purges_successfully(self, monkeypatch):
+ task = pretend.stub()
+ cacher = pretend.stub(purge_key=pretend.call_recorder(lambda k: None))
+ request = pretend.stub(
+ find_service=pretend.call_recorder(lambda iface: cacher),
+ )
+
+ fastly.purge_key.__wrapped__.__func__(task, request, "foo")
+
+ assert request.find_service.calls == [pretend.call(IOriginCache)]
+ assert cacher.purge_key.calls == [pretend.call("foo")]
+
+ @pytest.mark.parametrize(
+ "exception_type",
+ [
+ requests.ConnectionError,
+ requests.HTTPError,
+ requests.Timeout,
+ fastly.UnsuccessfulPurge,
+ ],
+ )
+ def test_purges_fails(self, monkeypatch, exception_type):
+ exc = exception_type()
+
+ class Cacher:
+ @staticmethod
+ @pretend.call_recorder
+ def purge_key(key):
+ raise exc
+
+ class Task:
+ @staticmethod
+ @pretend.call_recorder
+ def retry(exc):
+ raise celery.exceptions.Retry
+
+ task = Task()
+ cacher = Cacher()
+ request = pretend.stub(
+ find_service=pretend.call_recorder(lambda iface: cacher),
+ )
+
+ with pytest.raises(celery.exceptions.Retry):
+ fastly.purge_key.__wrapped__.__func__(task, request, "foo")
+
+ assert request.find_service.calls == [pretend.call(IOriginCache)]
+ assert cacher.purge_key.calls == [pretend.call("foo")]
+ assert task.retry.calls == [pretend.call(exc=exc)]
+
+
class TestFastlyCache:
def test_verify_service(self):
assert verifyClass(IOriginCache, fastly.FastlyCache)
+ def test_create_service(self):
+ request = pretend.stub(
+ registry=pretend.stub(
+ settings={
+ "origin_cache.api_key": "the api key",
+ "origin_cache.service_id": "the service id",
+ },
+ ),
+ )
+ cacher = fastly.FastlyCache.create_service(None, request)
+ assert isinstance(cacher, fastly.FastlyCache)
+ assert cacher.api_key == "the api key"
+ assert cacher.service_id == "the service id"
+
def test_adds_surrogate_key(self):
request = pretend.stub()
response = pretend.stub(headers={})
- cacher = fastly.FastlyCache()
+ cacher = fastly.FastlyCache(api_key=None, service_id=None)
cacher.cache(["abc", "defg"], request, response)
assert response.headers == {"Surrogate-Key": "abc defg"}
@@ -37,7 +105,7 @@ def test_adds_surrogate_control(self):
request = pretend.stub()
response = pretend.stub(headers={})
- cacher = fastly.FastlyCache()
+ cacher = fastly.FastlyCache(api_key=None, service_id=None)
cacher.cache(["abc", "defg"], request, response, seconds=9123)
assert response.headers == {
@@ -45,27 +113,71 @@ def test_adds_surrogate_control(self):
"Surrogate-Control": "max-age=9123",
}
- def test_purge_not_implemented(self):
- cacher = fastly.FastlyCache()
-
- with pytest.raises(NotImplementedError):
- cacher.purge(["one", "two"])
-
-
-def test_includeme(monkeypatch):
- fastly_cache_obj = pretend.stub()
- fastly_cache_cls = pretend.call_recorder(lambda: fastly_cache_obj)
- monkeypatch.setattr(fastly, "FastlyCache", fastly_cache_cls)
-
- config = pretend.stub(
- include=pretend.call_recorder(lambda inc: None),
- register_service=pretend.call_recorder(lambda svc, iface: None),
- )
-
- fastly.includeme(config)
-
- assert config.include.calls == [pretend.call("pyramid_services")]
- assert config.register_service.calls == [
- pretend.call(fastly_cache_obj, IOriginCache),
- ]
- assert fastly_cache_cls.calls == [pretend.call()]
+ def test_purge(self, monkeypatch):
+ cacher = fastly.FastlyCache(
+ api_key="an api key",
+ service_id="the-service-id",
+ )
+
+ purge_delay = pretend.call_recorder(lambda *a, **kw: None)
+ monkeypatch.setattr(fastly.purge_key, "delay", purge_delay)
+
+ cacher.purge(["one", "two"])
+
+ assert purge_delay.calls == [pretend.call("one"), pretend.call("two")]
+
+ def test_purge_key_ok(self, monkeypatch):
+ cacher = fastly.FastlyCache(
+ api_key="an api key",
+ service_id="the-service-id",
+ )
+
+ response = pretend.stub(
+ raise_for_status=pretend.call_recorder(lambda: None),
+ json=lambda: {"status": "ok"},
+ )
+ requests_post = pretend.call_recorder(lambda *a, **kw: response)
+ monkeypatch.setattr(requests, "post", requests_post)
+
+ cacher.purge_key("one")
+
+ assert requests_post.calls == [
+ pretend.call(
+ "https://api.fastly.com/service/the-service-id/purge/one",
+ headers={
+ "Accept": "application/json",
+ "Fastly-Key": "an api key",
+ "Fastly-Soft-Purge": "1",
+ },
+ ),
+ ]
+ assert response.raise_for_status.calls == [pretend.call()]
+
+ @pytest.mark.parametrize("result", [{"status": "fail"}, {}])
+ def test_purge_key_unsuccessful(self, monkeypatch, result):
+ cacher = fastly.FastlyCache(
+ api_key="an api key",
+ service_id="the-service-id",
+ )
+
+ response = pretend.stub(
+ raise_for_status=pretend.call_recorder(lambda: None),
+ json=lambda: result,
+ )
+ requests_post = pretend.call_recorder(lambda *a, **kw: response)
+ monkeypatch.setattr(requests, "post", requests_post)
+
+ with pytest.raises(fastly.UnsuccessfulPurge):
+ cacher.purge_key("one")
+
+ assert requests_post.calls == [
+ pretend.call(
+ "https://api.fastly.com/service/the-service-id/purge/one",
+ headers={
+ "Accept": "application/json",
+ "Fastly-Key": "an api key",
+ "Fastly-Soft-Purge": "1",
+ },
+ ),
+ ]
+ assert response.raise_for_status.calls == [pretend.call()]
diff --git a/tests/unit/cache/origin/test_init.py b/tests/unit/cache/origin/test_init.py
--- a/tests/unit/cache/origin/test_init.py
+++ b/tests/unit/cache/origin/test_init.py
@@ -17,12 +17,94 @@
from warehouse.cache.origin.interfaces import IOriginCache
+def test_store_purge_keys():
+ class Type1:
+ pass
+
+ class Type2:
+ pass
+
+ class Type3:
+ pass
+
+ class Type4:
+ pass
+
+ config = pretend.stub(
+ registry={
+ "cache_keys": {
+ Type1: lambda o: origin.CacheKeys(cache=[], purge=["type_1"]),
+ Type2: lambda o: origin.CacheKeys(
+ cache=[],
+ purge=["type_2", "foo"],
+ ),
+ Type3: lambda o: origin.CacheKeys(
+ cache=[],
+ purge=["type_3", "foo"],
+ ),
+ },
+ },
+ )
+ session = pretend.stub(
+ info={},
+ new={Type1()},
+ dirty={Type2()},
+ deleted={Type3(), Type4()},
+ )
+
+ origin.store_purge_keys(config, session, pretend.stub())
+
+ assert session.info["warehouse.cache.origin.purges"] == {
+ "type_1", "type_2", "type_3", "foo",
+ }
+
+
+def test_execute_purge_success():
+ cacher = pretend.stub(purge=pretend.call_recorder(lambda purges: None))
+ factory = pretend.call_recorder(lambda ctx, config: cacher)
+ config = pretend.stub(
+ find_service_factory=pretend.call_recorder(lambda i: factory),
+ )
+ session = pretend.stub(
+ info={
+ "warehouse.cache.origin.purges": {"type_1", "type_2", "foobar"},
+ },
+ )
+
+ origin.execute_purge(config, session)
+
+ assert config.find_service_factory.calls == [
+ pretend.call(origin.IOriginCache),
+ ]
+ assert factory.calls == [pretend.call(None, config)]
+ assert cacher.purge.calls == [pretend.call({"type_1", "type_2", "foobar"})]
+ assert "warehouse.cache.origin.purges" not in session.info
+
+
+def test_execute_purge_no_backend():
+ @pretend.call_recorder
+ def find_service_factory(interface):
+ raise ValueError
+
+ config = pretend.stub(find_service_factory=find_service_factory)
+ session = pretend.stub(
+ info={
+ "warehouse.cache.origin.purges": {"type_1", "type_2", "foobar"},
+ },
+ )
+
+ origin.execute_purge(config, session)
+
+ assert find_service_factory.calls == [pretend.call(origin.IOriginCache)]
+ assert "warehouse.cache.origin.purges" not in session.info
+
+
class TestOriginCache:
def test_no_cache_key(self):
response = pretend.stub()
- @origin.origin_cache
+ @origin.origin_cache(1)
def view(context, request):
return response
@@ -37,7 +119,7 @@ class Fake:
response = pretend.stub()
- @origin.origin_cache
+ @origin.origin_cache(1)
def view(context, request):
return response
@@ -54,8 +136,14 @@ def raiser(iface):
assert view(context, request) is response
assert raiser.calls == [pretend.call(IOriginCache)]
- @pytest.mark.parametrize("seconds", [None, 745])
- def test_response_hook(self, seconds):
+ @pytest.mark.parametrize(
+ ("seconds", "keys"),
+ [
+ (745, None),
+ (823, ["nope", "yup"]),
+ ],
+ )
+ def test_response_hook(self, seconds, keys):
class Fake:
pass
@@ -68,16 +156,15 @@ def cache(keys, request, response, seconds):
response = pretend.stub()
- if seconds is None:
- deco = origin.origin_cache
- else:
- deco = origin.origin_cache(seconds)
+ deco = origin.origin_cache(seconds, keys=keys)
@deco
def view(context, request):
return response
- key_maker = pretend.call_recorder(lambda obj: ["one", "two"])
+ key_maker = pretend.call_recorder(
+ lambda obj: origin.CacheKeys(cache=["one", "two"], purge=[])
+ )
cacher = Cache()
context = Fake()
callbacks = []
@@ -94,13 +181,46 @@ def view(context, request):
callbacks[0](request, response)
assert cacher.cache.calls == [
- pretend.call(["one", "two"], request, response, seconds=seconds),
+ pretend.call(
+ sorted(["one", "two"] + ([] if keys is None else keys)),
+ request,
+ response,
+ seconds=seconds,
+ ),
]
-def test_key_maker():
- key_maker = origin.key_maker_factory(["foo", "foo/{obj.attr}"])
- assert key_maker(pretend.stub(attr="bar")) == ["foo", "foo/bar"]
+class TestKeyMaker:
+
+ def test_both_cache_and_purge(self):
+ key_maker = origin.key_maker_factory(
+ cache_keys=["foo", "foo/{obj.attr}"],
+ purge_keys=["bar", "bar/{obj.attr}"],
+ )
+ assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys(
+ cache=["foo", "foo/bar"],
+ purge=["bar", "bar/bar"],
+ )
+
+ def test_only_cache(self):
+ key_maker = origin.key_maker_factory(
+ cache_keys=["foo", "foo/{obj.attr}"],
+ purge_keys=None,
+ )
+ assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys(
+ cache=["foo", "foo/bar"],
+ purge=[],
+ )
+
+ def test_only_purge(self):
+ key_maker = origin.key_maker_factory(
+ cache_keys=None,
+ purge_keys=["bar", "bar/{obj.attr}"],
+ )
+ assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys(
+ cache=[],
+ purge=["bar", "bar/bar"],
+ )
def test_register_origin_keys(monkeypatch):
@@ -111,17 +231,20 @@ class Fake2:
pass
key_maker = pretend.stub()
- key_maker_factory = pretend.call_recorder(lambda keys: key_maker)
+ key_maker_factory = pretend.call_recorder(lambda **kw: key_maker)
monkeypatch.setattr(origin, "key_maker_factory", key_maker_factory)
config = pretend.stub(registry={})
- origin.register_origin_cache_keys(config, Fake1, "one", "two/{obj.attr}")
- origin.register_origin_cache_keys(config, Fake2, "three")
+ origin.register_origin_cache_keys(
+ config, Fake1, cache_keys=["one", "two/{obj.attr}"])
+ origin.register_origin_cache_keys(
+ config, Fake2, cache_keys=["three"], purge_keys=["lol"],
+ )
assert key_maker_factory.calls == [
- pretend.call(("one", "two/{obj.attr}")),
- pretend.call(("three",)),
+ pretend.call(cache_keys=["one", "two/{obj.attr}"], purge_keys=None),
+ pretend.call(cache_keys=["three"], purge_keys=["lol"]),
]
assert config.registry == {
"cache_keys": {
@@ -131,9 +254,10 @@ class Fake2:
}
-def test_includeme():
+def test_includeme_no_origin_cache():
config = pretend.stub(
add_directive=pretend.call_recorder(lambda name, func: None),
+ registry=pretend.stub(settings={}),
)
origin.includeme(config)
@@ -144,3 +268,33 @@ def test_includeme():
origin.register_origin_cache_keys,
),
]
+
+
+def test_includeme_with_origin_cache():
+ cache_class = pretend.stub(create_service=pretend.stub())
+ config = pretend.stub(
+ add_directive=pretend.call_recorder(lambda name, func: None),
+ registry=pretend.stub(
+ settings={
+ "origin_cache.backend":
+ "warehouse.cache.origin.fastly.FastlyCache",
+ },
+ ),
+ maybe_dotted=pretend.call_recorder(lambda n: cache_class),
+ register_service_factory=pretend.call_recorder(lambda f, iface: None)
+ )
+
+ origin.includeme(config)
+
+ assert config.add_directive.calls == [
+ pretend.call(
+ "register_origin_cache_keys",
+ origin.register_origin_cache_keys,
+ ),
+ ]
+ assert config.maybe_dotted.calls == [
+ pretend.call("warehouse.cache.origin.fastly.FastlyCache"),
+ ]
+ assert config.register_service_factory.calls == [
+ pretend.call(cache_class.create_service, IOriginCache),
+ ]
diff --git a/tests/unit/packaging/test_init.py b/tests/unit/packaging/test_init.py
--- a/tests/unit/packaging/test_init.py
+++ b/tests/unit/packaging/test_init.py
@@ -42,7 +42,7 @@ def test_includme(monkeypatch):
"files.backend": "foo.bar",
},
),
- register_origin_cache_keys=pretend.call_recorder(lambda c, *k: None),
+ register_origin_cache_keys=pretend.call_recorder(lambda c, **kw: None),
)
packaging.includeme(config)
@@ -63,12 +63,15 @@ def test_includme(monkeypatch):
assert config.register_origin_cache_keys.calls == [
pretend.call(
Project,
- "project",
- "project/{obj.normalized_name}",
+ cache_keys=["project/{obj.normalized_name}"],
+ purge_keys=["project/{obj.normalized_name}", "all-projects"],
),
pretend.call(
Release,
- "project",
- "project/{obj.project.normalized_name}",
+ cache_keys=["project/{obj.project.normalized_name}"],
+ purge_keys=[
+ "project/{obj.project.normalized_name}",
+ "all-projects",
+ ],
),
]
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -15,6 +15,7 @@
import fs.opener
import pretend
import pytest
+import zope.interface
from pyramid import renderers
@@ -194,6 +195,27 @@ def test_maybe_set_compound(monkeypatch, environ, base, name, envvar,
assert settings == expected
+@pytest.mark.parametrize("factory", [None, pretend.stub()])
+def test_find_service_factory(monkeypatch, factory):
+ context_iface = pretend.stub()
+ provided_by = pretend.call_recorder(lambda context: context_iface)
+ monkeypatch.setattr(zope.interface, "providedBy", provided_by)
+
+ config_or_request = pretend.stub(
+ registry=pretend.stub(
+ adapters=pretend.stub(
+ lookup=pretend.call_recorder(lambda *a, **kw: factory),
+ ),
+ ),
+ )
+
+ if factory is None:
+ with pytest.raises(ValueError):
+ config.find_service_factory(config_or_request)
+ else:
+ assert config.find_service_factory(config_or_request) is factory
+
+
@pytest.mark.parametrize(
("settings", "environment"),
[
@@ -240,8 +262,10 @@ def __init__(self):
configurator_obj = pretend.stub(
registry=FakeRegistry(),
include=pretend.call_recorder(lambda include: None),
+ add_directive=pretend.call_recorder(lambda name, fn: None),
add_wsgi_middleware=pretend.call_recorder(lambda m, *a, **kw: None),
add_renderer=pretend.call_recorder(lambda name, renderer: None),
+ add_request_method=pretend.call_recorder(lambda fn: None),
add_jinja2_renderer=pretend.call_recorder(lambda renderer: None),
add_jinja2_search_path=pretend.call_recorder(lambda path, name: None),
get_settings=lambda: configurator_settings,
@@ -380,6 +404,12 @@ def __init__(self):
add_settings_dict = configurator_obj.add_settings.calls[1].args[0]
assert add_settings_dict["tm.manager_hook"](pretend.stub()) is \
transaction_manager
+ assert configurator_obj.add_directive.calls == [
+ pretend.call("find_service_factory", config.find_service_factory),
+ ]
+ assert configurator_obj.add_request_method.calls == [
+ pretend.call(config.find_service_factory),
+ ]
assert configurator_obj.add_tween.calls == [
pretend.call("warehouse.config.content_security_policy_tween_factory"),
pretend.call("warehouse.config.require_https_tween_factory"),
| Implement Purging
We need to implement Origin Cache purging, the start of this has been done in `warehouse/cache/origin/`. This needs to support both with and without a context object.
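For reference, the Fastly side of this boils down to an authenticated POST against the purge endpoint for a surrogate key, roughly what the `purge_key` method in the patch above wraps:
```
import requests

def purge_surrogate_key(service_id, api_key, key):
    # Invalidates everything cached under the given Surrogate-Key.
    resp = requests.post(
        "https://api.fastly.com/service/{}/purge/{}".format(service_id, key),
        headers={
            "Accept": "application/json",
            "Fastly-Key": api_key,
            "Fastly-Soft-Purge": "1",  # mark stale rather than hard-delete
        },
    )
    resp.raise_for_status()
    if resp.json().get("status") != "ok":
        raise RuntimeError("purge of {!r} did not succeed".format(key))
```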
| 2015-08-16T13:44:13Z | [] | [] |
|
pypi/warehouse | 618 | pypi__warehouse-618 | [
"391"
] | 094fd72bd7be68606fb881e85bb5618eb28c434b | diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -720,6 +720,11 @@ def doc_upload(request):
)
+@view_config(route_name="legacy.api.pypi.doap")
+def doap(request):
+ return _exc_with_message(HTTPGone, "DOAP is no longer supported.")
+
+
@forbidden_view_config(request_param=":action")
def forbidden_legacy(exc, request):
# We're not going to do anything amazing here, this just exists to override
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -68,6 +68,7 @@ def includeme(config):
"submit_pkg_info",
)
config.add_pypi_action_route("legacy.api.pypi.doc_upload", "doc_upload")
+ config.add_pypi_action_route("legacy.api.pypi.doap", "doap")
# Legacy XMLRPC
config.add_xmlrpc_endpoint(
| diff --git a/tests/functional/legacy_api/test_removed.py b/tests/functional/legacy_api/test_removed.py
--- a/tests/functional/legacy_api/test_removed.py
+++ b/tests/functional/legacy_api/test_removed.py
@@ -26,3 +26,8 @@ def test_remove_doc_upload(webtest):
"410 Uploading documentation is no longer supported, we recommend "
"using https://readthedocs.org/."
)
+
+
+def test_doap(webtest):
+ resp = webtest.get("/pypi?:action=doap&name=foo&version=1.0", status=410)
+ assert resp.status == "410 DOAP is no longer supported."
diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -1223,6 +1223,13 @@ def test_doc_upload(pyramid_request):
)
+def test_doap(pyramid_request):
+ resp = pypi.doap(pyramid_request)
+
+ assert resp.status_code == 410
+ assert resp.status == "410 DOAP is no longer supported."
+
+
def test_forbidden_legacy():
exc, request = pretend.stub(), pretend.stub()
resp = pypi.forbidden_legacy(exc, request)
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -101,6 +101,7 @@ def add_xmlrpc_endpoint(endpoint, pattern, header):
pretend.call("legacy.api.pypi.submit", "submit"),
pretend.call("legacy.api.pypi.submit_pkg_info", "submit_pkg_info"),
pretend.call("legacy.api.pypi.doc_upload", "doc_upload"),
+ pretend.call("legacy.api.pypi.doap", "doap"),
]
assert config.add_xmlrpc_endpoint.calls == [
| Implement DOAP
Determine if we need to keep DOAP or if we can get rid of it. If keeping it then we need to implement it. See some discussion on #58.
| My guess is no one knows what DOAP is. I've seen it on PyPI and had no clue and looked it up one time and concluded that it was something that was in vogue a long time ago and never really caught on?
Believe it or not, I've had people ping me about it only very recently, so it's being used. No idea how much though, or for what purpose.
| 2015-08-22T00:05:37Z | [] | [] |
pypi/warehouse | 627 | pypi__warehouse-627 | [
"625"
] | c368617cfe43e222a3171af0bcc0f0052ba4ec35 | diff --git a/warehouse/db.py b/warehouse/db.py
--- a/warehouse/db.py
+++ b/warehouse/db.py
@@ -28,6 +28,24 @@
__all__ = ["includeme", "metadata", "ModelBase"]
+# We'll add a basic predicate that won't do anything except allow marking a
+# route as read only (or not).
+class ReadOnlyPredicate:
+
+ def __init__(self, val, config):
+ self.val = val
+
+ def text(self):
+ return "read_only = {!r}".format(self.val)
+
+ phash = text
+
+ # This predicate doesn't actually participate in the route selection
+ # process, so we'll just always return True.
+ def __call__(self, info, request):
+ return True
+
+
class ModelBase:
def __repr__(self):
@@ -84,6 +102,16 @@ def _create_session(request):
# Create our session
session = Session(bind=request.registry["sqlalchemy.engine"])
+ # Set our transaction to read only if the route has been marked as read
+ # only.
+ for predicate in request.matched_route.predicates:
+ if isinstance(predicate, ReadOnlyPredicate) and predicate.val:
+ session.execute(
+ """ SET TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE
+ """
+ )
+
# Register only this particular session with zope.sqlalchemy
zope.sqlalchemy.register(session, transaction_manager=request.tm)
@@ -103,3 +131,6 @@ def includeme(config):
# Register our request.db property
config.add_request_method(_create_session, name="db", reify=True)
+
+ # Add a route predicate to mark a route as read only.
+ config.add_route_predicate("read_only", ReadOnlyPredicate)
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -12,7 +12,7 @@
def includeme(config):
- config.add_route("index", "/")
+ config.add_route("index", "/", read_only=True)
# Accounts
config.add_route(
@@ -20,6 +20,7 @@ def includeme(config):
"/user/{username}/",
factory="warehouse.accounts.models:UserFactory",
traverse="/{username}",
+ read_only=True,
)
config.add_route("accounts.login", "/account/login/")
config.add_route("accounts.logout", "/account/logout/")
@@ -30,34 +31,39 @@ def includeme(config):
"/project/{name}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}",
+ read_only=True,
)
config.add_route(
"packaging.release",
"/project/{name}/{version}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/{version}",
+ read_only=True,
)
- config.add_route("packaging.file", "/packages/{path:.*}")
+ config.add_route("packaging.file", "/packages/{path:.*}", read_only=True)
# Legacy URLs
- config.add_route("legacy.api.simple.index", "/simple/")
+ config.add_route("legacy.api.simple.index", "/simple/", read_only=True)
config.add_route(
"legacy.api.simple.detail",
"/simple/{name}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/",
+ read_only=True,
)
config.add_route(
"legacy.api.json.project",
"/pypi/{name}/json",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}",
+ read_only=True,
)
config.add_route(
"legacy.api.json.release",
"/pypi/{name}/{version}/json",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/{version}",
+ read_only=True,
)
# Legacy Action URLs
@@ -75,6 +81,7 @@ def includeme(config):
"pypi",
pattern="/pypi",
header="Content-Type:text/xml",
+ read_only=True,
)
# Legacy Documentation
| diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py
--- a/tests/unit/test_db.py
+++ b/tests/unit/test_db.py
@@ -14,6 +14,7 @@
import alembic.config
import pretend
+import pytest
import sqlalchemy
import venusian
import zope.sqlalchemy
@@ -87,7 +88,15 @@ def config_cls():
]
-def test_create_session(monkeypatch):
+@pytest.mark.parametrize(
+ "predicates",
+ [
+ [],
+ [db.ReadOnlyPredicate(False, None)],
+ [object()],
+ ],
+)
+def test_create_session(monkeypatch, predicates):
session_obj = pretend.stub()
session_cls = pretend.call_recorder(lambda bind: session_obj)
monkeypatch.setattr(db, "Session", session_cls)
@@ -96,6 +105,7 @@ def test_create_session(monkeypatch):
request = pretend.stub(
registry={"sqlalchemy.engine": engine},
tm=pretend.stub(),
+ matched_route=pretend.stub(predicates=predicates),
)
register = pretend.call_recorder(lambda session, transaction_manager: None)
@@ -108,6 +118,37 @@ def test_create_session(monkeypatch):
]
+def test_creates_readonly_session(monkeypatch):
+ session_obj = pretend.stub(execute=pretend.call_recorder(lambda sql: None))
+ session_cls = pretend.call_recorder(lambda bind: session_obj)
+ monkeypatch.setattr(db, "Session", session_cls)
+
+ engine = pretend.stub()
+ request = pretend.stub(
+ registry={"sqlalchemy.engine": engine},
+ tm=pretend.stub(),
+ matched_route=pretend.stub(
+ predicates=[db.ReadOnlyPredicate(True, None)],
+ ),
+ )
+
+ register = pretend.call_recorder(lambda session, transaction_manager: None)
+ monkeypatch.setattr(zope.sqlalchemy, "register", register)
+
+ assert _create_session(request) is session_obj
+ assert session_cls.calls == [pretend.call(bind=engine)]
+ assert register.calls == [
+ pretend.call(session_obj, transaction_manager=request.tm),
+ ]
+ assert session_obj.execute.calls == [
+ pretend.call(
+ """ SET TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE
+ """
+ ),
+ ]
+
+
def test_includeme(monkeypatch):
class FakeRegistry(dict):
settings = {"database.url": pretend.stub()}
@@ -118,6 +159,7 @@ class FakeRegistry(dict):
add_directive=pretend.call_recorder(lambda *a: None),
registry=FakeRegistry(),
add_request_method=pretend.call_recorder(lambda f, name, reify: None),
+ add_route_predicate=pretend.call_recorder(lambda *a, **kw: None),
)
monkeypatch.setattr(sqlalchemy, "create_engine", create_engine)
@@ -137,3 +179,6 @@ class FakeRegistry(dict):
assert config.add_request_method.calls == [
pretend.call(_create_session, name="db", reify=True),
]
+ assert config.add_route_predicate.calls == [
+ pretend.call("read_only", db.ReadOnlyPredicate),
+ ]
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -40,19 +40,20 @@ def add_pypi_action_route(name, action, **kwargs):
@staticmethod
@pretend.call_recorder
- def add_xmlrpc_endpoint(endpoint, pattern, header):
+ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
pass
config = FakeConfig()
includeme(config)
assert config.add_route.calls == [
- pretend.call('index', '/'),
+ pretend.call('index', '/', read_only=True),
pretend.call(
"accounts.profile",
"/user/{username}/",
factory="warehouse.accounts.models:UserFactory",
traverse="/{username}",
+ read_only=True,
),
pretend.call("accounts.login", "/account/login/"),
pretend.call("accounts.logout", "/account/logout/"),
@@ -61,32 +62,37 @@ def add_xmlrpc_endpoint(endpoint, pattern, header):
"/project/{name}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}",
+ read_only=True,
),
pretend.call(
"packaging.release",
"/project/{name}/{version}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/{version}",
+ read_only=True,
),
- pretend.call("packaging.file", "/packages/{path:.*}"),
- pretend.call("legacy.api.simple.index", "/simple/"),
+ pretend.call("packaging.file", "/packages/{path:.*}", read_only=True),
+ pretend.call("legacy.api.simple.index", "/simple/", read_only=True),
pretend.call(
"legacy.api.simple.detail",
"/simple/{name}/",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/",
+ read_only=True,
),
pretend.call(
"legacy.api.json.project",
"/pypi/{name}/json",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}",
+ read_only=True,
),
pretend.call(
"legacy.api.json.release",
"/pypi/{name}/{version}/json",
factory="warehouse.packaging.models:ProjectFactory",
traverse="/{name}/{version}",
+ read_only=True,
),
pretend.call("legacy.docs", docs_route_url),
]
@@ -105,5 +111,10 @@ def add_xmlrpc_endpoint(endpoint, pattern, header):
]
assert config.add_xmlrpc_endpoint.calls == [
- pretend.call("pypi", pattern="/pypi", header="Content-Type:text/xml"),
+ pretend.call(
+ "pypi",
+ pattern="/pypi",
+ header="Content-Type:text/xml",
+ read_only=True,
+ ),
]
| Enable Read Only Transactions (via factory wrapper)
If a route isn't going to modify data, then we can use a read only deferrable transaction so that we don't need to take SIRead locks and we skip some of the overhead of having a serializable transaction.
Closes #623
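Concretely, the patch marks routes with a `read_only` predicate and, when the per-request session is created for such a route, opens the transaction along these lines (simplified from `warehouse/db.py`):
```
from sqlalchemy import text
from sqlalchemy.orm import Session

def create_session(engine, read_only=False):
    session = Session(bind=engine)
    if read_only:
        # A SERIALIZABLE READ ONLY DEFERRABLE transaction waits for a safe
        # snapshot, then runs without taking SIRead predicate locks and can
        # never be cancelled by a serialization failure.
        session.execute(text(
            "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE"
        ))
    return session
```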
| 2015-08-24T20:02:48Z | [] | [] |
|
pypi/warehouse | 642 | pypi__warehouse-642 | [
"511"
] | 126652348e632be4d02746c4d12a608e23802819 | diff --git a/warehouse/accounts/auth_policy.py b/warehouse/accounts/auth_policy.py
--- a/warehouse/accounts/auth_policy.py
+++ b/warehouse/accounts/auth_policy.py
@@ -16,7 +16,7 @@
)
from warehouse.accounts.interfaces import IUserService
-from warehouse.cache.http import _add_vary_callback
+from warehouse.cache.http import add_vary_callback
class BasicAuthAuthenticationPolicy(_BasicAuthAuthenticationPolicy):
@@ -25,7 +25,7 @@ def unauthenticated_userid(self, request):
# If we're calling into this API on a request, then we want to register
# a callback which will ensure that the response varies based on the
# Authorization header.
- request.add_response_callback(_add_vary_callback("Authorization"))
+ request.add_response_callback(add_vary_callback("Authorization"))
# Dispatch to the real basic authentication policy
username = super().unauthenticated_userid(request)
@@ -43,7 +43,7 @@ def unauthenticated_userid(self, request):
# If we're calling into this API on a request, then we want to register
# a callback which will ensure that the response varies based on the
# Cookie header.
- request.add_response_callback(_add_vary_callback("Cookie"))
+ request.add_response_callback(add_vary_callback("Cookie"))
# Dispatch to the real SessionAuthenticationPolicy
return super().unauthenticated_userid(request)
diff --git a/warehouse/cache/http.py b/warehouse/cache/http.py
--- a/warehouse/cache/http.py
+++ b/warehouse/cache/http.py
@@ -16,7 +16,7 @@
from pyramid.tweens import EXCVIEW
-def _add_vary_callback(*varies):
+def add_vary_callback(*varies):
def inner(request, response):
vary = set(response.vary if response.vary is not None else [])
vary |= set(varies)
@@ -28,7 +28,7 @@ def add_vary(*varies):
def inner(view):
@functools.wraps(view)
def wrapped(context, request):
- request.add_response_callback(_add_vary_callback(*varies))
+ request.add_response_callback(add_vary_callback(*varies))
return view(context, request)
return wrapped
return inner
diff --git a/warehouse/cache/origin/__init__.py b/warehouse/cache/origin/__init__.py
--- a/warehouse/cache/origin/__init__.py
+++ b/warehouse/cache/origin/__init__.py
@@ -13,8 +13,36 @@
import collections
import functools
+import jinja2
+
+from pyramid.request import Request
+from pyramid.threadlocal import get_current_request
+
from warehouse import db
from warehouse.cache.origin.interfaces import IOriginCache
+from warehouse.cache.http import add_vary_callback
+
+
+@jinja2.contextfunction
+def esi_include(ctx, path, cookies=False):
+ request = ctx.get("request") or get_current_request()
+
+ if request.registry.settings.get("warehouse.prevent_esi", False):
+ return ""
+
+ try:
+ cacher = request.find_service(IOriginCache)
+ except ValueError:
+ subreq = Request.blank(path)
+ if cookies:
+ subreq.cookies.update(request.cookies)
+ request.add_response_callback(add_vary_callback("Cookie"))
+ resp = request.invoke_subrequest(subreq, use_tweens=True)
+ include = resp.body.decode(resp.charset)
+ else:
+ include = cacher.esi_include(request, path, cookies=cookies)
+
+ return jinja2.Markup(include)
@db.listens_for(db.Session, "after_flush")
diff --git a/warehouse/cache/origin/fastly.py b/warehouse/cache/origin/fastly.py
--- a/warehouse/cache/origin/fastly.py
+++ b/warehouse/cache/origin/fastly.py
@@ -93,3 +93,14 @@ def purge_key(self, key):
raise UnsuccessfulPurge(
"Could not successfully purge {!r}".format(key)
)
+
+ def esi_include(self, request, path, *, cookies=False):
+ @request.add_response_callback
+ def _esi_vary(request, response):
+ if cookies:
+ response.headers["Warehouse-ESI-Vary"] = "Cookie"
+
+ if cookies:
+ path += "?esi-cookies=1"
+
+ return '<esi:include src="{}" />'.format(path)
diff --git a/warehouse/cache/origin/interfaces.py b/warehouse/cache/origin/interfaces.py
--- a/warehouse/cache/origin/interfaces.py
+++ b/warehouse/cache/origin/interfaces.py
@@ -40,3 +40,13 @@ def purge(keys):
"""
Purge and responses associated with the specific keys.
"""
+
+ def esi_include(request, path, *, cookies=False):
+ """
+ Takes a current request object and a path and it returns a string that
+ can be embedded in order to trigger an Edge Side Include for that URL.
+
+ The cookies arugment is optional, and if provided should indicate
+ whether or not cookies should be passed along to the request when
+ including the content.
+ """
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -227,6 +227,7 @@ def configure(settings=None):
# We also want to register some global functions for Jinja
jglobals = config.get_settings().setdefault("jinja2.globals", {})
jglobals.setdefault("gravatar", "warehouse.utils.gravatar:gravatar")
+ jglobals.setdefault("esi_include", "warehouse.cache.origin:esi_include")
# We'll store all of our templates in one location, warehouse/templates
# so we'll go ahead and add that to the Jinja2 search path.
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -12,8 +12,16 @@
def includeme(config):
+ # Basic global routes
config.add_route("index", "/", read_only=True)
+ # ESI Routes
+ config.add_route(
+ "esi.current-user-indicator",
+ "/_esi/current-user-indicator/",
+ read_only=True,
+ )
+
# Accounts
config.add_route(
"accounts.profile",
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -20,10 +20,11 @@
from sqlalchemy.orm import joinedload
from warehouse.accounts import REDIRECT_FIELD_NAME
+from warehouse.accounts.models import User
from warehouse.cache.origin import origin_cache
from warehouse.csrf import csrf_exempt
from warehouse.packaging.models import Project, Release, File
-from warehouse.accounts.models import User
+from warehouse.sessions import uses_session
@view_config(context=HTTPException, decorator=[csrf_exempt])
@@ -84,3 +85,12 @@ def index(request):
'num_releases': num_releases,
'num_files': num_files,
}
+
+
+@view_config(
+ route_name="esi.current-user-indicator",
+ renderer="includes/current-user-indicator.html",
+ decorator=[uses_session],
+)
+def current_user_indicator(request):
+ return {}
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -93,6 +93,7 @@ def drop_database():
def app_config(database):
config = configure(
settings={
+ "warehouse.prevent_esi": True,
"warehouse.token": "insecure token",
"camo.url": "http://localhost:9000/",
"camo.key": "insecure key",
diff --git a/tests/unit/accounts/test_auth_policy.py b/tests/unit/accounts/test_auth_policy.py
--- a/tests/unit/accounts/test_auth_policy.py
+++ b/tests/unit/accounts/test_auth_policy.py
@@ -35,7 +35,7 @@ def test_unauthenticated_userid_no_userid(self, monkeypatch):
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
- monkeypatch.setattr(auth_policy, "_add_vary_callback", add_vary_cb)
+ monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None),
@@ -56,7 +56,7 @@ def test_unauthenticated_userid_with_userid(self, monkeypatch):
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
- monkeypatch.setattr(auth_policy, "_add_vary_callback", add_vary_cb)
+ monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
userid = pretend.stub()
service = pretend.stub(
@@ -89,7 +89,7 @@ def test_unauthenticated_userid(self, monkeypatch):
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
- monkeypatch.setattr(auth_policy, "_add_vary_callback", add_vary_cb)
+ monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
userid = pretend.stub()
request = pretend.stub(
diff --git a/tests/unit/cache/origin/test_fastly.py b/tests/unit/cache/origin/test_fastly.py
--- a/tests/unit/cache/origin/test_fastly.py
+++ b/tests/unit/cache/origin/test_fastly.py
@@ -15,6 +15,7 @@
import pytest
import requests
+from unittest import mock
from zope.interface.verify import verifyClass
from warehouse.cache.origin import fastly
@@ -191,3 +192,31 @@ def test_purge_key_unsuccessful(self, monkeypatch, result):
),
]
assert response.raise_for_status.calls == [pretend.call()]
+
+ @pytest.mark.parametrize(
+ ("path", "cookies", "expected"),
+ [
+ ("/_esi/foo/", True, "/_esi/foo/?esi-cookies=1"),
+ ("/_esi/foo/", False, "/_esi/foo/"),
+ ],
+ )
+ def test_esi_include(self, path, cookies, expected):
+ cacher = fastly.FastlyCache(
+ api_key="an api key",
+ service_id="the-service-id",
+ )
+
+ request = pretend.stub(
+ add_response_callback=pretend.call_recorder(lambda cb: None),
+ )
+
+ assert cacher.esi_include(request, path, cookies=cookies) == \
+ '<esi:include src="{}" />'.format(expected)
+ assert request.add_response_callback.calls == [pretend.call(mock.ANY)]
+ cb_request, cb_response = pretend.stub(), pretend.stub(headers={})
+ cb = request.add_response_callback.calls[0].args[0]
+ cb(cb_request, cb_response)
+ if cookies:
+ assert cb_response.headers == {"Warehouse-ESI-Vary": "Cookie"}
+ else:
+ assert cb_response.headers == {}
diff --git a/tests/unit/cache/origin/test_init.py b/tests/unit/cache/origin/test_init.py
--- a/tests/unit/cache/origin/test_init.py
+++ b/tests/unit/cache/origin/test_init.py
@@ -13,10 +13,78 @@
import pretend
import pytest
+from unittest import mock
+
from warehouse.cache import origin
from warehouse.cache.origin.interfaces import IOriginCache
+class TestESIInclude:
+
+ def test_skips_when_prevents(self):
+ request = pretend.stub(
+ registry=pretend.stub(
+ settings={"warehouse.prevent_esi": True},
+ ),
+ )
+ assert origin.esi_include({"request": request}, "/") == ""
+
+ @pytest.mark.parametrize("cookies", [True, False, None])
+ def test_calls_cacher(self, cookies):
+ cacher = pretend.stub(
+ esi_include=pretend.call_recorder(
+ lambda request, path, cookies: "<the esi tag />"
+ ),
+ )
+ request = pretend.stub(
+ registry=pretend.stub(settings={}),
+ find_service=pretend.call_recorder(lambda iface: cacher),
+ )
+ ctx = {"request": request}
+ assert origin.esi_include(ctx, "/", cookies=cookies) == \
+ "<the esi tag />"
+ assert request.find_service.calls == [pretend.call(IOriginCache)]
+ assert cacher.esi_include.calls == [
+ pretend.call(request, "/", cookies=cookies),
+ ]
+
+ @pytest.mark.parametrize("cookies", [True, False, None])
+ def test_dev_fallback(self, cookies):
+ @pretend.call_recorder
+ def find_service(iface):
+ raise ValueError
+
+ subresponse = pretend.stub(body=b"the response body", charset="utf8")
+ request = pretend.stub(
+ registry=pretend.stub(settings={}),
+ find_service=find_service,
+ invoke_subrequest=pretend.call_recorder(
+ lambda req, use_tweens: subresponse
+ ),
+ cookies={"FooBar": "Wat"},
+ add_response_callback=pretend.call_recorder(lambda cb: None),
+ )
+ ctx = {"request": request}
+
+ assert origin.esi_include(ctx, "/", cookies=cookies) == \
+ "the response body"
+ assert request.find_service.calls == [pretend.call(IOriginCache)]
+ assert request.invoke_subrequest.calls == [
+ pretend.call(mock.ANY, use_tweens=True),
+ ]
+
+ if cookies:
+ subreq = request.invoke_subrequest.calls[0].args[0]
+ assert subreq.path == "/"
+ assert request.add_response_callback.calls == [
+ pretend.call(mock.ANY),
+ ]
+ cb_request, cb_response = pretend.stub(), pretend.stub(vary=[])
+ cb = request.add_response_callback.calls[0].args[0]
+ cb(cb_request, cb_response)
+ assert cb_response.vary == {"Cookie"}
+
+
def test_store_purge_keys():
class Type1:
pass
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -48,6 +48,11 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
assert config.add_route.calls == [
pretend.call('index', '/', read_only=True),
+ pretend.call(
+ "esi.current-user-indicator",
+ "/_esi/current-user-indicator/",
+ read_only=True,
+ ),
pretend.call(
"accounts.profile",
"/user/{username}/",
diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -14,7 +14,9 @@
import pretend
-from warehouse.views import forbidden, index, exception_view
+from warehouse.views import (
+ forbidden, index, exception_view, current_user_indicator,
+)
from ..common.db.packaging import (
ProjectFactory, ReleaseFactory, FileFactory,
@@ -76,3 +78,7 @@ def test_index(self, db_request):
'num_releases': 2,
'num_files': 1,
}
+
+
+def test_esi_current_user_indicator():
+ assert current_user_indicator(pretend.stub()) == {}
| Conditional header
If the user is logged out, they see Sign up and Sign in buttons. Otherwise they see buttons to take them to their profile and to log out.
### Logged in
![image](https://cloud.githubusercontent.com/assets/3261985/7530587/48766064-f500-11e4-8a69-0f0d48fb06f7.png)
### Logged out
![image](https://cloud.githubusercontent.com/assets/3261985/7530592/5475adfc-f500-11e4-9c8c-d12aea6d4547.png)
Hmm, something is apparently broken because this shouldn't actually be possible, accessing request.user should require decorating a view with `@uses_session`. My plan for this was to have it done with javascript actually, the reason is that 99% of the content on a PyPI page is not user specific, so if we keep the responses user agnostic then we can cache the response once in varnish for all users, instead of once per user.
Heh, I bet it's because it's accessing it inside of a template, and that happens at the rendering layer.
> accessing it inside of a template
That is true. And I don't think anything is broken yet. I forgot to add translations for the text in the buttons in the second commit, but fixed it in the third one.
I'm not sure how the js implementation would work - an ajax request perhaps, but I think that's just overcomplicating the client code.
The problem is, take the `/` view: other than this little bit of UI there is nothing in there that is user specific, so if we can avoid adding user-specific data to it we can use the same cached response for every user. This is important because PyPI gets roughly 100 million HTTP requests a day, and anything we can cache at Fastly is a request that doesn't hit our backends, which lets us serve more traffic with fewer servers (and be faster at it too). So Warehouse has a goal where we don't add user-specific content to pages unless that page _needs_ it to function.
There are a few ways to handle this that don't require us to cache every page for each user instead of just globally:
- As you identified, use an AJAX request to fetch the current logged in user and render this little bit of UI based on that.
- Have the login/logout view add another cookie that just states the logged in user's name. The web application will never use this cookie but a little bit of js can decide to render a logged in UI based on the existence of this cookie.
- Use edge side includes which will allow us to cache the main project page, but drop a little tag into the response which will be replaced inside of varnish with the value of some other response. This is sort of like the `{% include %}` tag except it happens inside of varnish and varnish can cache the main response separately (and without being user specific) while caching the user information per user.
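A rough sketch of that third option, which is what the patch above ends up doing — emit an ESI tag when an origin cache is configured, and fall back to an in-process subrequest in development (simplified; the real helper is a Jinja2 context function and also handles cookies):
```
from pyramid.request import Request

from warehouse.cache.origin.interfaces import IOriginCache


def esi_include(request, path):
    """Return markup that pulls in the fragment served at *path*."""
    try:
        cacher = request.find_service(IOriginCache)
    except ValueError:
        # Development fallback: no CDN in front of us, so render the
        # fragment right now with an in-process subrequest.
        resp = request.invoke_subrequest(Request.blank(path), use_tweens=True)
        return resp.body.decode(resp.charset)
    # Production: emit a tag that Varnish/Fastly expands at the edge,
    # e.g. '<esi:include src="/_esi/current-user-indicator/" />'.
    return cacher.esi_include(request, path)
```
In a template the helper is exposed as a Jinja global, so the call is just `{{ esi_include("/_esi/current-user-indicator/") }}` and the page itself stays user-agnostic and cacheable.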
To be clear, the broken thing is that this PR works at all, because Warehouse should error when you attempt to access `request.user` unless you've decorated a view with the `uses_session` decorator.
Ah this makes sense. Thanks for explaining this.
I'll reimplement this using the second method.
| 2015-08-29T02:03:42Z | [] | [] |
pypi/warehouse | 655 | pypi__warehouse-655 | [
"346"
] | 45f15b3b0920d50b2be455cba35b925c3f0fb319 | diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -38,7 +38,8 @@
from warehouse.utils.http import require_POST
-MAXIMUM_FILESIZE = 60 * 1024 * 1024 # 60M
+MAX_FILESIZE = 60 * 1024 * 1024 # 60M
+MAX_SIGSIZE = 8 * 1024 # 8K
ALLOWED_PLATFORMS = {
@@ -606,6 +607,11 @@ def file_upload(request):
"different version.",
)
+ # The project may or may not have a file size specified on the project, if
+ # it does then it may or may not be smaller or larger than our global file
+ # size limits.
+ file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))
+
# Buffer the entire file into memory, checking the hash of the file as we
# go along.
file_content = io.BytesIO()
@@ -613,7 +619,7 @@ def file_upload(request):
file_hash = hashlib.md5()
for chunk in iter(lambda: request.POST["content"].file.read(8096), b""):
file_size += len(chunk)
- if file_size > MAXIMUM_FILESIZE:
+ if file_size > file_size_limit:
raise _exc_with_message(HTTPBadRequest, "File too large.")
file_content.write(chunk)
file_hash.update(chunk)
@@ -626,7 +632,7 @@ def file_upload(request):
for chunk in iter(
lambda: request.POST["gpg_signature"].file.read(8096), b""):
signature_size += len(chunk)
- if signature_size > MAXIMUM_FILESIZE:
+ if signature_size > MAX_SIGSIZE:
raise _exc_with_message(HTTPBadRequest, "Signature too large.")
signature.write(chunk)
signature.seek(0)
diff --git a/warehouse/migrations/versions/9177113533_add_a_column_to_specify_a_project_.py b/warehouse/migrations/versions/9177113533_add_a_column_to_specify_a_project_.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/9177113533_add_a_column_to_specify_a_project_.py
@@ -0,0 +1,36 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Add a column to specify a project specific upload limit
+
+Revision ID: 9177113533
+Revises: 10cb17aea73
+Create Date: 2015-09-04 21:06:59.950947
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+revision = "9177113533"
+down_revision = "10cb17aea73"
+
+
+def upgrade():
+ op.add_column(
+ "packages",
+ sa.Column("upload_limit", sa.Integer(), nullable=True),
+ )
+
+
+def downgrade():
+ op.drop_column("packages", "upload_limit")
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -94,6 +94,7 @@ class Project(db.ModelBase):
server_default=sql.func.now(),
)
has_docs = Column(Boolean)
+ upload_limit = Column(Integer, nullable=True)
releases = orm.relationship(
"Release",
| diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -716,7 +716,7 @@ def test_upload_fails_with_too_large_file(self, pyramid_config,
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -751,7 +751,7 @@ def test_upload_fails_with_too_large_signature(self, pyramid_config,
),
"gpg_signature": pretend.stub(
filename=filename + ".asc",
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -782,7 +782,7 @@ def test_upload_fails_with_previously_used_filename(self, pyramid_config,
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -817,7 +817,7 @@ def test_upload_fails_with_existing_file(self, pyramid_config, db_request):
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -850,7 +850,7 @@ def test_upload_fails_with_wrong_filename(self, pyramid_config,
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -886,7 +886,7 @@ def test_upload_fails_with_invalid_extension(self, pyramid_config,
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -921,7 +921,7 @@ def test_upload_fails_with_unsafe_filename(self, pyramid_config,
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
@@ -950,7 +950,7 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request):
"md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
- file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)),
+ file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
),
})
| Ability to increase file size limit on per-project basis?
Once in a while (three times in the years we've been hosting files) we get a request from a user to exceed the hard-coded file size limit in PyPI. Maybe there should be some way to cater for them? Maybe it's too much to ask to handle just their extremely rare use-case?
| Hello, this issue affects me and I am adding a note here (many people probably encounter this issue, but they do not make noise about it). I think it is also a chicken-and-egg (chicken-and-wheel? :-P) problem for getting more scientific Python packages onto PyPI. A per-project increase would be helpful. Thanks for improving PyPI :-).
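For what it's worth, the limit-resolution expression in the patch above reduces to "take the larger of the global cap and the per-project cap, ignoring a missing one"; a standalone illustration:

```python
# Mirrors the expression used in the patch:
#   file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))
MAX_FILESIZE = 60 * 1024 * 1024  # 60M global default


def resolve_limit(project_upload_limit):
    return max(filter(None, [MAX_FILESIZE, project_upload_limit]))


assert resolve_limit(None) == MAX_FILESIZE                     # no override: global cap applies
assert resolve_limit(100 * 1024 * 1024) == 100 * 1024 * 1024   # raised per-project cap wins
assert resolve_limit(1024) == MAX_FILESIZE                     # a smaller value never lowers the cap
```

So the new column can only ever raise the limit above the global default, which matches the rare "please let this one project upload bigger files" requests described here.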
| 2015-09-04T21:18:37Z | [] | [] |
pypi/warehouse | 662 | pypi__warehouse-662 | [
"412"
] | 7bc9e2cfadfbf3951df7ca23edc65a539ef04765 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -70,12 +70,10 @@
"alembic>=0.7.0",
"Babel",
"bcrypt",
- "boto", # Needed for hosting Documentation on S3
"boto3",
"celery>=3.1",
"click",
"eventlet",
- "fs",
"gunicorn",
"hiredis",
"html5lib",
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -14,7 +14,6 @@
import os
import shlex
-import fs.opener
import pyramid_services
import transaction
import zope.interface
@@ -169,7 +168,6 @@ def configure(settings=None):
maybe_set(settings, "camo.url", "CAMO_URL")
maybe_set(settings, "camo.key", "CAMO_KEY")
maybe_set(settings, "docs.url", "DOCS_URL")
- maybe_set(settings, "dirs.documentation", "DOCS_DIR")
maybe_set_compound(settings, "files", "backend", "FILES_BACKEND")
maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE")
@@ -333,15 +331,6 @@ def configure(settings=None):
# sent via POST.
config.add_tween("warehouse.config.require_https_tween_factory")
- # Configure the filesystems we use.
- config.registry["filesystems"] = {}
- for key, path in {
- k[5:]: v
- for k, v in config.registry.settings.items()
- if k.startswith("dirs.")}.items():
- config.registry["filesystems"][key] = \
- fs.opener.fsopendir(path, create_dir=True)
-
# Enable Warehouse to service our static files
config.add_static_view(
name="static",
| diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -12,7 +12,6 @@
from unittest import mock
-import fs.opener
import pretend
import pytest
import zope.interface
@@ -231,10 +230,6 @@ def test_find_service_factory(monkeypatch, factory):
],
)
def test_configure(monkeypatch, settings, environment):
- fs_obj = pretend.stub()
- opener = pretend.call_recorder(lambda path, create_dir: fs_obj)
- monkeypatch.setattr(fs.opener, "fsopendir", opener)
-
json_renderer_obj = pretend.stub()
json_renderer_cls = pretend.call_recorder(lambda **kw: json_renderer_obj)
monkeypatch.setattr(renderers, "JSON", json_renderer_cls)
@@ -419,7 +414,6 @@ def __init__(self):
pretend.call("warehouse.config.content_security_policy_tween_factory"),
pretend.call("warehouse.config.require_https_tween_factory"),
]
- assert configurator_obj.registry["filesystems"] == {"packages": fs_obj}
assert configurator_obj.add_static_view.calls == [
pretend.call(
name="static",
@@ -433,9 +427,6 @@ def __init__(self):
assert configurator_obj.scan.calls == [
pretend.call(ignore=["warehouse.migrations.env", "warehouse.wsgi"]),
]
- assert opener.calls == [
- pretend.call("/srv/data/pypi/packages/", create_dir=True),
- ]
assert configurator_obj.add_renderer.calls == [
pretend.call("json", json_renderer_obj),
pretend.call("xmlrpc", xmlrpc_renderer_obj),
| Move Filesystems out of the registry
Currently our filesystem access goes through an object that's stored on the registry. This is probably less than ideal. We should investigate other methods (services? Unsure) which don't rely on reaching into the registry as much.
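The test changes in this PR already point at one direction: an `IFileStorage` service looked up through `pyramid_services` (via a `files.backend` setting) instead of a `filesystems` dict on the registry. A rough sketch of that shape, with the interface body, factory, and the `files.path` setting as assumptions:

```python
# Hypothetical sketch of storage exposed as a pyramid_services service.
import os.path

from zope.interface import Interface, implementer


class IFileStorage(Interface):
    def get(path):
        """Return an open file object for the stored path."""


@implementer(IFileStorage)
class LocalFileStorage:
    def __init__(self, base):
        self.base = base

    @classmethod
    def create_service(cls, context, request):
        return cls(request.registry.settings["files.path"])

    def get(self, path):
        return open(os.path.join(self.base, path), "rb")


def includeme(config):
    config.register_service_factory(LocalFileStorage.create_service, IFileStorage)
```

Views then call `request.find_service(IFileStorage)` rather than reaching into `request.registry`.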
| 2015-09-06T22:30:07Z | [] | [] |
|
pypi/warehouse | 666 | pypi__warehouse-666 | [
"665"
] | f9a8519b6023dd85f5ffb7df8e3afdb17a061267 | diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -91,19 +91,38 @@ def release_detail(release, request):
)
]
- stats_svc = request.find_service(IDownloadStatService)
-
return {
"project": project,
"release": release,
"files": release.files.all(),
"all_releases": all_releases,
"maintainers": maintainers,
- "download_stats": {
- "daily": stats_svc.get_daily_stats(project.name),
- "weekly": stats_svc.get_weekly_stats(project.name),
- "monthly": stats_svc.get_monthly_stats(project.name),
- },
+ }
+
+
+@view_config(
+ route_name="esi.project-stats",
+ renderer="packaging/includes/project-stats.html",
+ decorator=[
+ origin_cache(
+ 15 * 60, # 15 Minutes
+ stale_while_revalidate=30 * 60, # 30 minutes
+ stale_if_error=30 * 60, # 30 minutes
+ ),
+ ],
+)
+def project_stats(project, request):
+ if project.name != request.matchdict.get("name", project.name):
+ return HTTPMovedPermanently(
+ request.current_route_path(name=project.name),
+ )
+
+ stats_svc = request.find_service(IDownloadStatService)
+
+ return {
+ "daily": stats_svc.get_daily_stats(project.name),
+ "weekly": stats_svc.get_weekly_stats(project.name),
+ "monthly": stats_svc.get_monthly_stats(project.name),
}
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -28,6 +28,13 @@ def includeme(config):
"/_esi/current-user-indicator/",
read_only=True,
)
+ config.add_route(
+ "esi.project-stats",
+ "/_esi/project-stats/{name}/",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}",
+ read_only=True,
+ )
# Accounts
config.add_route(
| diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py
--- a/tests/unit/packaging/test_views.py
+++ b/tests/unit/packaging/test_views.py
@@ -18,7 +18,7 @@
from webob import datetime_utils
from warehouse.packaging import views
-from warehouse.packaging.interfaces import IFileStorage
+from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from ...common.db.accounts import UserFactory
from ...common.db.packaging import (
@@ -125,16 +125,6 @@ def test_detail_renders(self, db_request):
role_name="another role",
)
- daily_stats = pretend.stub()
- weekly_stats = pretend.stub()
- monthly_stats = pretend.stub()
-
- db_request.find_service = lambda x: pretend.stub(
- get_daily_stats=lambda p: daily_stats,
- get_weekly_stats=lambda p: weekly_stats,
- get_monthly_stats=lambda p: monthly_stats,
- )
-
result = views.release_detail(releases[1], db_request)
assert result == {
@@ -145,14 +135,53 @@ def test_detail_renders(self, db_request):
(r.version, r.created) for r in reversed(releases)
],
"maintainers": sorted(users, key=lambda u: u.username.lower()),
- "download_stats": {
- "daily": daily_stats,
- "weekly": weekly_stats,
- "monthly": monthly_stats,
- },
}
+class TestProjectStats:
+
+ def test_normalizing_redirects(self, pyramid_request):
+ project = pretend.stub(name="Foo")
+ name = project.name.lower()
+
+ pyramid_request.matchdict = {"name": name}
+ pyramid_request.current_route_path = pretend.call_recorder(
+ lambda name: "/_esi/project-stats/the-redirect/"
+ )
+
+ resp = views.project_stats(project, pyramid_request)
+
+ assert isinstance(resp, HTTPMovedPermanently)
+ assert resp.headers["Location"] == "/_esi/project-stats/the-redirect/"
+ assert pyramid_request.current_route_path.calls == [
+ pretend.call(name=project.name),
+ ]
+
+ def test_project_stats(self, pyramid_request):
+ project = pretend.stub(name="Foo")
+
+ class DownloadService:
+ _stats = {"Foo": {"daily": 10, "weekly": 70, "monthly": 300}}
+
+ def get_daily_stats(self, name):
+ return self._stats[name]["daily"]
+
+ def get_weekly_stats(self, name):
+ return self._stats[name]["weekly"]
+
+ def get_monthly_stats(self, name):
+ return self._stats[name]["monthly"]
+
+ services = {IDownloadStatService: DownloadService()}
+
+ pyramid_request.matchdict = {"name": project.name}
+ pyramid_request.find_service = lambda iface: services[iface]
+
+ stats = views.project_stats(project, pyramid_request)
+
+ assert stats == {"daily": 10, "weekly": 70, "monthly": 300}
+
+
class TestPackages:
def test_404_when_no_file(self, db_request):
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -60,6 +60,13 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
"/_esi/current-user-indicator/",
read_only=True,
),
+ pretend.call(
+ "esi.project-stats",
+ "/_esi/project-stats/{name}/",
+ factory="warehouse.packaging.models:ProjectFactory",
+ traverse="/{name}",
+ read_only=True,
+ ),
pretend.call(
"accounts.profile",
"/user/{username}/",
| Fail gracefully if Redis isn't Up
Right now, if the Redis instance backing the statistics goes down, we'll start throwing errors when rendering the page, which is obviously less than desirable. Ideally we want to fail gracefully here, so that if Redis goes down we simply stop rendering statistics until it comes back. A tricky part is that we'll want to use ESI so that we don't cache the failure output in place of the statistics for a long time. This makes sense anyway, since we want our stats to update more often than the bulk of the page. Additionally, we should ensure that we log the error regardless of whether we raise an exception or not.
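Independent of the ESI split, the stat service itself could swallow Redis outages so the include degrades to "no data" instead of erroring; a minimal sketch (the key names, logging, and `None` return value are assumptions, not what this PR implements):

```python
# Hypothetical sketch: degrade gracefully when Redis is unreachable, but still
# log the failure so it shows up in Sentry/monitoring.
import logging

import redis

logger = logging.getLogger(__name__)


class RedisDownloadStatService:
    def __init__(self, url):
        self.redis = redis.StrictRedis.from_url(url)

    def get_daily_stats(self, project):
        try:
            return int(self.redis.get("downloads:daily:" + project) or 0)
        except redis.RedisError:
            logger.exception("Could not fetch daily stats for %r", project)
            return None  # template renders "no stats available" for None
```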
| 2015-09-07T21:49:06Z | [] | [] |
|
pypi/warehouse | 689 | pypi__warehouse-689 | [
"495",
"59"
] | f128ff0d9f5b341219d64a795874245307495af0 | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -55,6 +55,7 @@
"warehouse.classifiers",
"warehouse.cli",
"warehouse.cli.db",
+ "warehouse.cli.search",
"warehouse.i18n",
"warehouse.legacy",
"warehouse.legacy.api",
@@ -73,6 +74,8 @@
"boto3",
"celery>=3.1",
"click",
+ "elasticsearch>=1.0.0,<2.0.0",
+ "elasticsearch_dsl",
"hiredis",
"html5lib",
"itsdangerous",
diff --git a/warehouse/cli/search/__init__.py b/warehouse/cli/search/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/cli/search/__init__.py
@@ -0,0 +1,20 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from warehouse.cli import warehouse
+
+
+@warehouse.group() # pragma: no branch
+def search():
+ """
+ Manage the Warehouse Search.
+ """
diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py
new file mode 100644
--- /dev/null
+++ b/warehouse/cli/search/reindex.py
@@ -0,0 +1,98 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import binascii
+import os
+
+import click
+
+from elasticsearch.helpers import bulk
+from sqlalchemy.orm import lazyload, joinedload
+
+from warehouse.cli.search import search
+from warehouse.db import Session
+from warehouse.packaging.models import Release, Project
+from warehouse.packaging.search import Project as ProjectDocType
+from warehouse.search import INDEX_NAME, get_index
+
+
+def _project_docs(db):
+ releases = (
+ db.query(Release)
+ .execution_options(stream_results=True)
+ .options(lazyload("*"),
+ joinedload(Release.project)
+ .subqueryload(Project.releases)
+ .load_only("version"))
+ .distinct(Release.name)
+ .order_by(Release.name, Release._pypi_ordering.desc())
+ )
+ for release in releases:
+ p = ProjectDocType.from_db(release)
+ p.full_clean()
+ yield p.to_dict(include_meta=True)
+
+
+@search.command()
+@click.pass_obj
+def reindex(config, **kwargs):
+ """
+ Recreate the Search Index.
+ """
+ client = config.registry["elasticsearch.client"]
+ db = Session(bind=config.registry["sqlalchemy.engine"])
+
+ # We use a randomly named index so that we can do a zero downtime reindex.
+ # Essentially we'll use a randomly named index which we will use until all
+ # of the data has been reindexed, at which point we'll point an alias at
+ # our randomly named index, and then delete the old randomly named index.
+
+ # Create the new index and associate all of our doc types with it.
+ random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
+ new_index_name = "{}-{}".format(INDEX_NAME, random_token)
+ doc_types = config.registry.get("search.doc_types", set())
+ new_index = get_index(new_index_name, doc_types, using=client)
+ new_index.create()
+
+ # From this point on, if any error occurs, we want to be able to delete our
+ # in progress index.
+ try:
+ db.execute(
+ """ BEGIN TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE
+ READ ONLY
+ DEFERRABLE
+ """
+ )
+ db.execute("SET statement_timeout = '600s'")
+
+ bulk(client, _project_docs(db))
+ except:
+ new_index.delete()
+ raise
+ finally:
+ db.rollback()
+ db.close()
+
+ # Now that we've finished indexing all of our data, we'll point the alias
+ # at our new randomly named index and delete the old index.
+ if client.indices.exists_alias(name=INDEX_NAME):
+ to_delete = set()
+ actions = []
+ for name in client.indices.get_alias(name=INDEX_NAME):
+ to_delete.add(name)
+ actions.append({"remove": {"index": name, "alias": INDEX_NAME}})
+ actions.append({"add": {"index": new_index_name, "alias": INDEX_NAME}})
+ client.indices.update_aliases({"actions": actions})
+ client.indices.delete(",".join(to_delete))
+ else:
+ client.indices.put_alias(name=INDEX_NAME, index=new_index_name)
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -160,6 +160,7 @@ def configure(settings=None):
maybe_set(settings, "celery.broker_url", "AMQP_URL")
maybe_set(settings, "celery.result_url", "REDIS_URL")
maybe_set(settings, "database.url", "DATABASE_URL")
+ maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_URL")
maybe_set(settings, "sentry.dsn", "SENTRY_DSN")
maybe_set(settings, "sentry.transport", "SENTRY_TRANSPORT")
maybe_set(settings, "sessions.url", "REDIS_URL")
@@ -281,6 +282,8 @@ def configure(settings=None):
# Register the configuration for the PostgreSQL database.
config.include(".db")
+ config.include(".search")
+
# Register the support for AWS
config.include(".aws")
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -40,9 +40,13 @@ def json_project(project, request):
)
try:
- release = project.releases.order_by(
- Release._pypi_ordering.desc()
- ).limit(1).one()
+ release = (
+ request.db.query(Release)
+ .filter(Release.project == project)
+ .order_by(Release._pypi_ordering.desc())
+ .limit(1)
+ .one()
+ )
except NoResultFound:
return HTTPNotFound()
diff --git a/warehouse/legacy/api/simple.py b/warehouse/legacy/api/simple.py
--- a/warehouse/legacy/api/simple.py
+++ b/warehouse/legacy/api/simple.py
@@ -83,7 +83,11 @@ def simple_detail(project, request):
.options(joinedload(File.release))
.filter(
File.name == project.name,
- File.version.in_(project.releases.with_entities(Release.version))
+ File.version.in_(
+ request.db.query(Release)
+ .filter(Release.project == project)
+ .with_entities(Release.version)
+ )
)
.order_by(File.filename)
.all()
diff --git a/warehouse/legacy/api/xmlrpc.py b/warehouse/legacy/api/xmlrpc.py
--- a/warehouse/legacy/api/xmlrpc.py
+++ b/warehouse/legacy/api/xmlrpc.py
@@ -13,6 +13,7 @@
import datetime
import functools
+from elasticsearch_dsl import Q
from pyramid.view import view_config
from pyramid_rpc.xmlrpc import exception_view as _exception_view, xmlrpc_method
from sqlalchemy import func, select
@@ -40,6 +41,47 @@ def exception_view(exc, request):
return _exception_view(exc, request)
+@pypi_xmlrpc(method="search")
+def search(request, spec, operator="and"):
+ if operator not in {"and", "or"}:
+ raise ValueError("Invalid operator, must be one of 'and' or 'or'.")
+
+ # Remove any invalid spec fields
+ spec = {
+ k: [v] if isinstance(v, str) else v
+ for k, v in spec.items()
+ if v and k in {
+ "name", "version", "author", "author_email", "maintainer",
+ "maintainer_email", "home_page", "license", "summary",
+ "description", "keywords", "platform", "download_url",
+ }
+ }
+
+ queries = []
+ for field, value in sorted(spec.items()):
+ q = None
+ for item in value:
+ if q is None:
+ q = Q("match", **{field: item})
+ else:
+ q |= Q("match", **{field: item})
+ queries.append(q)
+
+ if operator == "and":
+ query = request.es.query("bool", must=queries)
+ else:
+ query = request.es.query("bool", should=queries)
+
+ results = query[:1000].execute()
+
+ return [
+ {"name": r.name, "summary": r.summary, "version": v}
+ for r in results
+ for v in r.version
+ if v in spec.get("version", [v])
+ ]
+
+
@pypi_xmlrpc(method="list_packages")
def list_packages(request):
names = request.db.query(Project.name).order_by(Project.name).all()
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -101,12 +101,19 @@ class Project(SitemapMixin, db.ModelBase):
"Release",
backref="project",
cascade="all, delete-orphan",
- lazy="dynamic",
+ order_by=lambda: Release._pypi_ordering.desc(),
)
def __getitem__(self, version):
+ session = orm.object_session(self)
+
try:
- return self.releases.filter(Release.version == version).one()
+ return (
+ session.query(Release)
+ .filter((Release.project == self) &
+ (Release.version == version))
+ .one()
+ )
except NoResultFound:
raise KeyError from None
diff --git a/warehouse/packaging/search.py b/warehouse/packaging/search.py
new file mode 100644
--- /dev/null
+++ b/warehouse/packaging/search.py
@@ -0,0 +1,62 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from elasticsearch_dsl import DocType, String, analyzer, MetaField
+
+from warehouse.search import doc_type
+
+
+EmailAnalyzer = analyzer(
+ "email",
+ tokenizer="uax_url_email",
+ filter=["standard", "lowercase", "stop", "snowball"],
+)
+
+
+@doc_type
+class Project(DocType):
+
+ name = String()
+ version = String(index="not_analyzed", multi=True)
+ summary = String(analyzer="snowball")
+ description = String(analyzer="snowball")
+ author = String()
+ author_email = String(analyzer=EmailAnalyzer)
+ maintainer = String()
+ maintainer_email = String(analyzer=EmailAnalyzer)
+ license = String()
+ home_page = String(index="not_analyzed")
+ download_url = String(index="not_analyzed")
+ keywords = String(analyzer="snowball")
+ platform = String(index="not_analyzed")
+
+ class Meta:
+ # disable the _all field to save some space
+ all = MetaField(enabled=False)
+
+ @classmethod
+ def from_db(cls, release):
+ obj = cls(meta={"id": release.project.normalized_name})
+ obj["name"] = release.project.name
+ obj["version"] = [r.version for r in release.project.releases]
+ obj["summary"] = release.summary
+ obj["description"] = release.description
+ obj["author"] = release.author
+ obj["author_email"] = release.author_email
+ obj["maintainer"] = release.maintainer
+ obj["maintainer_email"] = release.maintainer_email
+ obj["home_page"] = release.home_page
+ obj["download_url"] = release.download_url
+ obj["keywords"] = release.keywords
+ obj["platform"] = release.platform
+
+ return obj
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -41,9 +41,13 @@ def project_detail(project, request):
)
try:
- release = project.releases.order_by(
- Release._pypi_ordering.desc()
- ).limit(1).one()
+ release = (
+ request.db.query(Release)
+ .filter(Release.project == project)
+ .order_by(Release._pypi_ordering.desc())
+ .limit(1)
+ .one()
+ )
except NoResultFound:
return HTTPNotFound()
@@ -72,10 +76,11 @@ def release_detail(release, request):
# Get all of the registered versions for this Project, in order of newest
# to oldest.
all_releases = (
- project.releases
- .with_entities(Release.version, Release.created)
- .order_by(Release._pypi_ordering.desc())
- .all()
+ request.db.query(Release)
+ .filter(Release.project == project)
+ .with_entities(Release.version, Release.created)
+ .order_by(Release._pypi_ordering.desc())
+ .all()
)
# Get all of the maintainers for this project.
diff --git a/warehouse/search.py b/warehouse/search.py
new file mode 100644
--- /dev/null
+++ b/warehouse/search.py
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import elasticsearch
+import venusian
+
+from elasticsearch_dsl import Index
+
+
+INDEX_NAME = "warehouse"
+
+
+def doc_type(cls):
+ def callback(scanner, _name, item):
+ types_ = scanner.config.registry.setdefault("search.doc_types", set())
+ types_.add(item)
+
+ venusian.attach(cls, callback)
+
+ return cls
+
+
+def get_index(name, doc_types, *, using):
+ index = Index(name, using=using)
+ for doc_type in doc_types:
+ index.doc_type(doc_type)
+ return index
+
+
+def es(request):
+ client = request.registry["elasticsearch.client"]
+ doc_types = request.registry.get("search.doc_types", set())
+ index = get_index(INDEX_NAME, doc_types, using=client)
+ return index.search()
+
+
+def includeme(config):
+ config.registry["elasticsearch.client"] = elasticsearch.Elasticsearch(
+ [config.registry.settings["elasticsearch.url"]],
+ verify_certs=True,
+ )
+ config.add_request_method(es, name="es", reify=True)
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -102,6 +102,7 @@ def app_config(database):
"database.url": database,
"docs.url": "http://docs.example.com/",
"download_stats.url": "redis://localhost:0/",
+ "elasticsearch.url": "https://localhost/",
"files.backend": "warehouse.packaging.services.LocalFileStorage",
"sessions.secret": "123456",
"sessions.url": "redis://localhost:0/",
diff --git a/tests/unit/cli/search/__init__.py b/tests/unit/cli/search/__init__.py
new file mode 100644
diff --git a/tests/unit/cli/search/test_reindex.py b/tests/unit/cli/search/test_reindex.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/cli/search/test_reindex.py
@@ -0,0 +1,257 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import packaging.version
+import pretend
+
+import warehouse.cli.search.reindex
+
+from warehouse.cli.search.reindex import reindex, _project_docs
+
+from ....common.db.packaging import ProjectFactory, ReleaseFactory
+
+
+def test_project_docs(db_session):
+ projects = [ProjectFactory.create() for _ in range(2)]
+ releases = {
+ p: sorted(
+ [ReleaseFactory.create(project=p) for _ in range(3)],
+ key=lambda r: packaging.version.parse(r.version),
+ reverse=True,
+ )
+ for p in projects
+ }
+
+ assert list(_project_docs(db_session)) == [
+ {
+ "_id": p.normalized_name,
+ "_type": "project",
+ "_source": {
+ "name": p.name,
+ "version": [r.version for r in prs],
+ },
+ }
+ for p, prs in sorted(releases.items(), key=lambda x: x[0].name)
+ ]
+
+
+class FakeESIndices:
+
+ def __init__(self):
+ self.indices = {}
+ self.aliases = {}
+
+ def create(self, index, body):
+ self.indices[index] = body
+
+ def delete(self, index):
+ self.indices.pop(index, None)
+
+ def exists_alias(self, name):
+ return name in self.aliases
+
+ def get_alias(self, name):
+ return self.aliases[name]
+
+ def put_alias(self, name, index):
+ self.aliases.setdefault(name, []).append(index)
+
+ def remove_alias(self, name, alias):
+ self.aliases[name] = [n for n in self.aliases[name] if n != alias]
+ if not self.aliases[name]:
+ del self.aliases[name]
+
+ def update_aliases(self, body):
+ for items in body["actions"]:
+ for action, values in items.items():
+ if action == "add":
+ self.put_alias(values["alias"], values["index"])
+ elif action == "remove":
+ self.remove_alias(values["alias"], values["index"])
+ else:
+ raise ValueError("Unknown action: {!r}.".format(action))
+
+
+class FakeESClient:
+
+ def __init__(self):
+ self.indices = FakeESIndices()
+
+
+class TestReindex:
+
+ def test_fails_when_raising(self, monkeypatch, cli):
+ sess_obj = pretend.stub(
+ execute=pretend.call_recorder(lambda q: None),
+ rollback=pretend.call_recorder(lambda: None),
+ close=pretend.call_recorder(lambda: None),
+ )
+ sess_cls = pretend.call_recorder(lambda bind: sess_obj)
+ monkeypatch.setattr(warehouse.cli.search.reindex, "Session", sess_cls)
+
+ docs = pretend.stub()
+ project_docs = lambda db: docs
+ monkeypatch.setattr(
+ warehouse.cli.search.reindex,
+ "_project_docs",
+ project_docs,
+ )
+
+ es_client = FakeESClient()
+ db_engine = pretend.stub()
+
+ config = pretend.stub(
+ registry={
+ "elasticsearch.client": es_client,
+ "sqlalchemy.engine": db_engine,
+ },
+ )
+
+ class TestException(Exception):
+ pass
+
+ def bulk(client, iterable):
+ assert client is es_client
+ assert iterable is docs
+ raise TestException
+ monkeypatch.setattr(warehouse.cli.search.reindex, "bulk", bulk)
+
+ result = cli.invoke(reindex, obj=config)
+
+ assert result.exit_code == -1
+ assert isinstance(result.exception, TestException)
+ assert sess_cls.calls == [pretend.call(bind=db_engine)]
+ assert sess_obj.execute.calls == [
+ pretend.call(
+ """ BEGIN TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE
+ READ ONLY
+ DEFERRABLE
+ """
+ ),
+ pretend.call("SET statement_timeout = '600s'"),
+ ]
+ assert sess_obj.rollback.calls == [pretend.call()]
+ assert sess_obj.close.calls == [pretend.call()]
+ assert es_client.indices.indices == {}
+
+ def test_successfully_indexes_and_adds_new(self, monkeypatch, cli):
+ sess_obj = pretend.stub(
+ execute=pretend.call_recorder(lambda q: None),
+ rollback=pretend.call_recorder(lambda: None),
+ close=pretend.call_recorder(lambda: None),
+ )
+ sess_cls = pretend.call_recorder(lambda bind: sess_obj)
+ monkeypatch.setattr(warehouse.cli.search.reindex, "Session", sess_cls)
+
+ docs = pretend.stub()
+ project_docs = lambda db: docs
+ monkeypatch.setattr(
+ warehouse.cli.search.reindex,
+ "_project_docs",
+ project_docs,
+ )
+
+ es_client = FakeESClient()
+ db_engine = pretend.stub()
+
+ config = pretend.stub(
+ registry={
+ "elasticsearch.client": es_client,
+ "sqlalchemy.engine": db_engine,
+ },
+ )
+
+ bulk = pretend.call_recorder(lambda client, iterable: None)
+ monkeypatch.setattr(warehouse.cli.search.reindex, "bulk", bulk)
+
+ monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
+
+ result = cli.invoke(reindex, obj=config)
+
+ assert result.exit_code == 0
+ assert sess_cls.calls == [pretend.call(bind=db_engine)]
+ assert sess_obj.execute.calls == [
+ pretend.call(
+ """ BEGIN TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE
+ READ ONLY
+ DEFERRABLE
+ """
+ ),
+ pretend.call("SET statement_timeout = '600s'"),
+ ]
+ assert bulk.calls == [pretend.call(es_client, docs)]
+ assert sess_obj.rollback.calls == [pretend.call()]
+ assert sess_obj.close.calls == [pretend.call()]
+ assert set(es_client.indices.indices) == {"warehouse-cbcbcbcbcb"}
+ assert es_client.indices.aliases == {
+ "warehouse": ["warehouse-cbcbcbcbcb"],
+ }
+
+ def test_successfully_indexes_and_replaces(self, monkeypatch, cli):
+ sess_obj = pretend.stub(
+ execute=pretend.call_recorder(lambda q: None),
+ rollback=pretend.call_recorder(lambda: None),
+ close=pretend.call_recorder(lambda: None),
+ )
+ sess_cls = pretend.call_recorder(lambda bind: sess_obj)
+ monkeypatch.setattr(warehouse.cli.search.reindex, "Session", sess_cls)
+
+ docs = pretend.stub()
+ project_docs = lambda db: docs
+ monkeypatch.setattr(
+ warehouse.cli.search.reindex,
+ "_project_docs",
+ project_docs,
+ )
+
+ es_client = FakeESClient()
+ es_client.indices.indices["warehouse-aaaaaaaaaa"] = None
+ es_client.indices.aliases["warehouse"] = ["warehouse-aaaaaaaaaa"]
+ db_engine = pretend.stub()
+
+ config = pretend.stub(
+ registry={
+ "elasticsearch.client": es_client,
+ "sqlalchemy.engine": db_engine,
+ },
+ )
+
+ bulk = pretend.call_recorder(lambda client, iterable: None)
+ monkeypatch.setattr(warehouse.cli.search.reindex, "bulk", bulk)
+
+ monkeypatch.setattr(os, "urandom", lambda n: b"\xcb" * n)
+
+ result = cli.invoke(reindex, obj=config)
+
+ assert result.exit_code == 0
+ assert sess_cls.calls == [pretend.call(bind=db_engine)]
+ assert sess_obj.execute.calls == [
+ pretend.call(
+ """ BEGIN TRANSACTION
+ ISOLATION LEVEL SERIALIZABLE
+ READ ONLY
+ DEFERRABLE
+ """
+ ),
+ pretend.call("SET statement_timeout = '600s'"),
+ ]
+ assert bulk.calls == [pretend.call(es_client, docs)]
+ assert sess_obj.rollback.calls == [pretend.call()]
+ assert sess_obj.close.calls == [pretend.call()]
+ assert set(es_client.indices.indices) == {"warehouse-cbcbcbcbcb"}
+ assert es_client.indices.aliases == {
+ "warehouse": ["warehouse-cbcbcbcbcb"],
+ }
diff --git a/tests/unit/legacy/api/test_xmlrpc.py b/tests/unit/legacy/api/test_xmlrpc.py
--- a/tests/unit/legacy/api/test_xmlrpc.py
+++ b/tests/unit/legacy/api/test_xmlrpc.py
@@ -28,6 +28,171 @@
)
+class TestSearch:
+
+ def test_fails_with_invalid_operator(self):
+ with pytest.raises(ValueError):
+ xmlrpc.search(pretend.stub(), {}, "lol nope")
+
+ def test_default_search_operator(self):
+ class FakeQuery:
+ def __init__(self, type, must):
+ self.type = type
+ self.must = must
+
+ def __getitem__(self, name):
+ self.offset = name.start
+ self.limit = name.stop
+ self.step = name.step
+ return self
+
+ def execute(self):
+ assert self.type == "bool"
+ assert [q.to_dict() for q in self.must] == [
+ {"match": {"name": "foo"}},
+ {
+ "bool": {
+ "should": [
+ {"match": {"summary": "one"}},
+ {"match": {"summary": "two"}},
+ ],
+ },
+ },
+ ]
+ assert self.offset is None
+ assert self.limit == 1000
+ assert self.step is None
+ return [
+ pretend.stub(
+ name="foo",
+ summary="my summary",
+ version=["1.0"],
+ ),
+ pretend.stub(
+ name="foo-bar",
+ summary="other summary",
+ version=["2.0", "1.0"],
+ ),
+ ]
+
+ request = pretend.stub(es=pretend.stub(query=FakeQuery))
+ results = xmlrpc.search(
+ request,
+ {"name": "foo", "summary": ["one", "two"]},
+ )
+ assert results == [
+ {"name": "foo", "summary": "my summary", "version": "1.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "2.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "1.0"},
+ ]
+
+ def test_searches_with_and(self):
+ class FakeQuery:
+ def __init__(self, type, must):
+ self.type = type
+ self.must = must
+
+ def __getitem__(self, name):
+ self.offset = name.start
+ self.limit = name.stop
+ self.step = name.step
+ return self
+
+ def execute(self):
+ assert self.type == "bool"
+ assert [q.to_dict() for q in self.must] == [
+ {"match": {"name": "foo"}},
+ {
+ "bool": {
+ "should": [
+ {"match": {"summary": "one"}},
+ {"match": {"summary": "two"}},
+ ],
+ },
+ },
+ ]
+ assert self.offset is None
+ assert self.limit == 1000
+ assert self.step is None
+ return [
+ pretend.stub(
+ name="foo",
+ summary="my summary",
+ version=["1.0"],
+ ),
+ pretend.stub(
+ name="foo-bar",
+ summary="other summary",
+ version=["2.0", "1.0"],
+ ),
+ ]
+
+ request = pretend.stub(es=pretend.stub(query=FakeQuery))
+ results = xmlrpc.search(
+ request,
+ {"name": "foo", "summary": ["one", "two"]},
+ "and",
+ )
+ assert results == [
+ {"name": "foo", "summary": "my summary", "version": "1.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "2.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "1.0"},
+ ]
+
+ def test_searches_with_or(self):
+ class FakeQuery:
+ def __init__(self, type, should):
+ self.type = type
+ self.should = should
+
+ def __getitem__(self, name):
+ self.offset = name.start
+ self.limit = name.stop
+ self.step = name.step
+ return self
+
+ def execute(self):
+ assert self.type == "bool"
+ assert [q.to_dict() for q in self.should] == [
+ {"match": {"name": "foo"}},
+ {
+ "bool": {
+ "should": [
+ {"match": {"summary": "one"}},
+ {"match": {"summary": "two"}},
+ ],
+ },
+ },
+ ]
+ assert self.offset is None
+ assert self.limit == 1000
+ assert self.step is None
+ return [
+ pretend.stub(
+ name="foo",
+ summary="my summary",
+ version=["1.0"],
+ ),
+ pretend.stub(
+ name="foo-bar",
+ summary="other summary",
+ version=["2.0", "1.0"],
+ ),
+ ]
+
+ request = pretend.stub(es=pretend.stub(query=FakeQuery))
+ results = xmlrpc.search(
+ request,
+ {"name": "foo", "summary": ["one", "two"]},
+ "or",
+ )
+ assert results == [
+ {"name": "foo", "summary": "my summary", "version": "1.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "2.0"},
+ {"name": "foo-bar", "summary": "other summary", "version": "1.0"},
+ ]
+
+
def test_list_packages(db_request):
projects = [ProjectFactory.create() for _ in range(10)]
assert set(xmlrpc.list_packages(db_request)) == {p.name for p in projects}
diff --git a/tests/unit/packaging/test_search.py b/tests/unit/packaging/test_search.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/packaging/test_search.py
@@ -0,0 +1,55 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse.packaging import search
+
+
+def test_build_search():
+ release = pretend.stub(
+ project=pretend.stub(
+ name="Foobar",
+ normalized_name="foobar",
+ releases=[
+ pretend.stub(version="1.0"),
+ pretend.stub(version="2.0"),
+ pretend.stub(version="3.0"),
+ pretend.stub(version="4.0"),
+ ],
+ ),
+ summary="This is my summary",
+ description="This is my description",
+ author="Jane Author",
+ author_email="jane.author@example.com",
+ maintainer="Joe Maintainer",
+ maintainer_email="joe.maintainer@example.com",
+ home_page="https://example.com/foobar/",
+ download_url="https://example.com/foobar/downloads/",
+ keywords="the, keywords, lol",
+ platform="any platform",
+ )
+ obj = search.Project.from_db(release)
+
+ assert obj.meta.id == "foobar"
+ assert obj["name"] == "Foobar"
+ assert obj["version"] == ["1.0", "2.0", "3.0", "4.0"]
+ assert obj["summary"] == "This is my summary"
+ assert obj["description"] == "This is my description"
+ assert obj["author"] == "Jane Author"
+ assert obj["author_email"] == "jane.author@example.com"
+ assert obj["maintainer"] == "Joe Maintainer"
+ assert obj["maintainer_email"] == "joe.maintainer@example.com"
+ assert obj["home_page"] == "https://example.com/foobar/"
+ assert obj["download_url"] == "https://example.com/foobar/downloads/"
+ assert obj["keywords"] == "the, keywords, lol"
+ assert obj["platform"] == "any platform"
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -354,6 +354,7 @@ def __init__(self):
pretend.call(".legacy.action_routing"),
pretend.call(".i18n"),
pretend.call(".db"),
+ pretend.call(".search"),
pretend.call(".aws"),
pretend.call(".celery"),
pretend.call(".sessions"),
diff --git a/tests/unit/test_search.py b/tests/unit/test_search.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/test_search.py
@@ -0,0 +1,42 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse import search
+
+
+def test_es(monkeypatch):
+ search_obj = pretend.stub()
+ index_obj = pretend.stub(
+ doc_type=pretend.call_recorder(lambda d: None),
+ search=pretend.call_recorder(lambda: search_obj),
+ )
+ index_cls = pretend.call_recorder(lambda name, using: index_obj)
+ monkeypatch.setattr(search, "Index", index_cls)
+
+ doc_types = [pretend.stub(), pretend.stub()]
+
+ client = pretend.stub()
+ request = pretend.stub(
+ registry={
+ "elasticsearch.client": client,
+ "search.doc_types": doc_types,
+ },
+ )
+
+ es = search.es(request)
+
+ assert es is search_obj
+ assert index_cls.calls == [pretend.call("warehouse", using=client)]
+ assert index_obj.doc_type.calls == [pretend.call(d) for d in doc_types]
+ assert index_obj.search.calls == [pretend.call()]
| [WIP] Elasticsearch support
Fixes #396.
TODO:
- [x] no global `get_current_registry()`
- [x] implement bulk queries
- [x] use https://github.com/elastic/elasticsearch-dsl-py as high-level DSL
- [ ] basic search view
- [ ] tests
Maybe:
- [ ] use workers (like celery) to process indexing
- [ ] script to reindex current db
Port Existing XMLRPC API
- [x] `list_packages`
- [x] `package_releases`
- [x] `release_urls`
- [x] `release_data`
- [x] `search`
- [x] `browse`
- [x] ~~`updated_releases`~~
- [x] `changelog`
- [x] `changelog_last_serial`
- [x] `changelog_since_serial`
- [x] ~~`changed_packages`~~
- [x] ~~`release_downloads`~~
- [x] `package_roles`
- [x] `user_packages`
- [x] `package_hosting_mode`
- [x] `top_packages`
- [x] `list_packages_with_serial`
@dstufft I think we'll need workers to process the indexing parts, to keep HTTP requests lightweight. What do you think?
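A rough sketch of what pushing indexing out to a worker could look like; this uses a plain Celery task and the low-level client, and the task signature, index name, and the idea of building the document body elsewhere are all assumptions rather than anything in this PR:

```python
# Hypothetical sketch: index one project asynchronously instead of inside the
# upload request; the already-built document body is passed in.
from celery import shared_task
from elasticsearch import Elasticsearch


@shared_task
def index_project(es_url, normalized_name, doc):
    client = Elasticsearch([es_url])
    client.index(
        index="warehouse",
        doc_type="project",
        id=normalized_name,
        body=doc,
    )
```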
The XML-RPC interface in PyPI is "inspired" by Zope's approach from ... well, a very, very long time ago :) It assumes that any access on the "/pypi" URL with a content-type of text/xml is an XML-RPC access. I think it's reasonable to leave the legacy XML-RPC interface in that place - to respond to text/xml at "/pypi".
I will therefore take the current rpc.py module from PyPI and adapt it to werkzeug and the warehouse model code. And write tests.
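In Pyramid/pyramid_rpc terms that content-type sniffing can be expressed as a header predicate on the `/pypi` endpoint, so only `text/xml` requests are treated as XML-RPC; a hedged sketch (the endpoint name is illustrative, though the tests in this repo do show `add_xmlrpc_endpoint` being called with a `header` argument):

```python
# Hypothetical sketch: only requests hitting /pypi with a text/xml
# Content-Type are dispatched to the legacy XML-RPC endpoint.
def includeme(config):
    config.include("pyramid_rpc.xmlrpc")
    config.add_xmlrpc_endpoint(
        "pypi",
        "/pypi",
        header="Content-Type:text/xml",
    )
```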
TODO: modify the xmlrpc test suite to ensure the data generated by the methods can be serialised (110% coverage)
I've done all but search 'cos Donald and I need to talk about how that stuff should work. Oh, and the 110% thing. Gonna PR what I have right now as I'm finishing up for the day anyway.
Does this still need test work done for it or is only the search stuff now?
Just needs the search hooked in.
Ok cool, thanks.
And with #221 we've now completed implementing the entire XMLRPC API.
This regressed with the move to Pyramid.
I've killed a few unused methods now from this list.
All that's left now is the `search` method; this will require waiting until after #396 has been completed.
| 2015-09-22T21:51:05Z | [] | [] |
pypi/warehouse | 722 | pypi__warehouse-722 | [
"94"
] | 838c6f78761ce2eeeeb438b8353ab3f999cb7a2b | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -53,7 +53,11 @@ def make_wsgi_app(self, *args, **kwargs):
def content_security_policy_tween_factory(handler, registry):
policy = registry.settings.get("csp", {})
- policy = "; ".join([" ".join([k] + v) for k, v in sorted(policy.items())])
+ policy = "; ".join([
+ " ".join([k] + [v2 for v2 in v if v2 is not None])
+ for k, v in sorted(policy.items())
+ if [v2 for v2 in v if v2 is not None]
+ ])
def content_security_policy_tween(request):
resp = handler(request)
@@ -159,6 +163,7 @@ def configure(settings=None):
maybe_set(settings, "aws.region", "AWS_REGION")
maybe_set(settings, "celery.broker_url", "AMQP_URL")
maybe_set(settings, "celery.result_url", "REDIS_URL")
+ maybe_set(settings, "csp.report_uri", "CSP_REPORT_URI")
maybe_set(settings, "database.url", "DATABASE_URL")
maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_URL")
maybe_set(settings, "sentry.dsn", "SENTRY_DSN")
@@ -324,6 +329,7 @@ def configure(settings=None):
],
"referrer": ["cross-origin"],
"reflected-xss": ["block"],
+ "report-uri": [config.registry.settings.get("csp.report_uri")],
"script-src": ["'self'"],
"style-src": ["'self'"],
},
| diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -397,6 +397,7 @@ def __init__(self):
],
"referrer": ["cross-origin"],
"reflected-xss": ["block"],
+ "report-uri": [None],
"script-src": ["'self'"],
"style-src": ["'self'"],
},
| Implement a report URI for the Content Security Policy
In #93 a Content Security Policy was added; however, reports are not being sent anywhere. Ideally they would go to something like Sentry (perhaps even Sentry itself?).
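With the patch above the report target is just another setting (`csp.report_uri`, fed from the `CSP_REPORT_URI` environment variable), so pointing it at a Sentry-style collector is purely configuration. A small illustration of how the tween's header-building logic folds the directive in; the report URL is a placeholder, not a real endpoint:

```python
# Same None-filtering join as the tween in the patch, on a trimmed-down policy.
policy = {
    "default-src": ["'none'"],
    "report-uri": ["https://sentry.example.com/api/1/csp-report/?sentry_key=publickey"],
}

header = "; ".join(
    " ".join([k] + [v for v in values if v is not None])
    for k, values in sorted(policy.items())
    if any(v is not None for v in values)
)

assert header == (
    "default-src 'none'; "
    "report-uri https://sentry.example.com/api/1/csp-report/?sentry_key=publickey"
)
```

If `csp.report_uri` is left unset, the `[None]` value means the whole `report-uri` directive is dropped rather than emitted empty.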
| FWIW, we considered adding support for reporting CSP violations to Sentry, but not a single person was interested at the time. We can reconsider if you'd want to use it. :)
PyPI already uses sentry so getting CSP violations into Sentry would be ideal. Mozilla has written a django-csp app too that has a mini sentry inside of it and they might also be interested in it.
Awesome. I'll look into this and see what we need to do. :)
+1
Oh hey, we can beta test this now in Sentry for those that are interested. @dstufft, gimme a holla.
![](http://cdn.meme.am/instances/50194253.jpg)
| 2015-10-15T18:40:48Z | [] | [] |
pypi/warehouse | 821 | pypi__warehouse-821 | [
"791"
] | 1a44a80f99f516f68dd094ab3a05397138be520e | diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py
--- a/warehouse/i18n/__init__.py
+++ b/warehouse/i18n/__init__.py
@@ -53,6 +53,10 @@ def includeme(config):
# Register our i18n/l10n filters for Jinja2
filters = config.get_settings().setdefault("jinja2.filters", {})
filters.setdefault("format_date", "warehouse.i18n.filters:format_date")
+ filters.setdefault(
+ "format_datetime",
+ "warehouse.i18n.filters:format_datetime",
+ )
# Register our finalize function for Jinja2
config.get_settings()["jinja2.finalize"] = translate_value
diff --git a/warehouse/i18n/filters.py b/warehouse/i18n/filters.py
--- a/warehouse/i18n/filters.py
+++ b/warehouse/i18n/filters.py
@@ -21,3 +21,10 @@ def format_date(ctx, *args, **kwargs):
request = ctx.get("request") or get_current_request()
kwargs.setdefault("locale", request.locale)
return babel.dates.format_date(*args, **kwargs)
+
+
+@jinja2.contextfilter
+def format_datetime(ctx, *args, **kwargs):
+ request = ctx.get("request") or get_current_request()
+ kwargs.setdefault("locale", request.locale)
+ return babel.dates.format_datetime(*args, **kwargs)
| diff --git a/tests/unit/i18n/test_filters.py b/tests/unit/i18n/test_filters.py
--- a/tests/unit/i18n/test_filters.py
+++ b/tests/unit/i18n/test_filters.py
@@ -31,3 +31,20 @@ def test_format_date(monkeypatch):
kwargs.update({"locale": request.locale})
assert format_date.calls == [pretend.call(*args, **kwargs)]
+
+
+def test_format_datetime(monkeypatch):
+ formatted = pretend.stub()
+ format_datetime = pretend.call_recorder(lambda *a, **kw: formatted)
+ monkeypatch.setattr(babel.dates, "format_datetime", format_datetime)
+
+ request = pretend.stub(locale=pretend.stub())
+ ctx = pretend.stub(get=pretend.call_recorder(lambda k: request))
+
+ args = [pretend.stub(), pretend.stub()]
+ kwargs = {"foo": pretend.stub()}
+
+ assert filters.format_datetime(ctx, *args, **kwargs) is formatted
+
+ kwargs.update({"locale": request.locale})
+ assert format_datetime.calls == [pretend.call(*args, **kwargs)]
diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py
--- a/tests/unit/i18n/test_init.py
+++ b/tests/unit/i18n/test_init.py
@@ -58,6 +58,7 @@ def test_includeme():
assert config_settings == {
"jinja2.filters": {
"format_date": "warehouse.i18n.filters:format_date",
+ "format_datetime": "warehouse.i18n.filters:format_datetime",
},
"jinja2.finalize": i18n.translate_value,
"jinja2.i18n.domain": "warehouse",
| Display how long ago the release was made
We'll want to display how long ago the release was made on the project/release detail page. Probably we'll want to implement this in javascript?
@dstufft I also want to make sure that we are consistent in the way that we display dates across all pages. So, something like "April 10th, 2015"? I prefer to avoid "4/10/15" as a format, since (unless we enable internationalisation) that means the 4th of October here...
It would also be cool if we could do something similar to https://docs.djangoproject.com/en/dev/ref/contrib/humanize/#naturaltime
Right now all dates are displayed using a filter that does localization (though it only supports en-US right now, I think). Anything that isn't "static" should be done via JavaScript (e.g. "April 10th, 2015" is static: it will always be April 10th, 2015, whereas "6 months ago" is dynamic: in 6 more months it becomes "12 months ago").
Whatever we do though, it should handle localization and translation.
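For the static half (text like "April 10th, 2015" that never changes), Babel's locale-aware formatters already cover the consistency and localization concerns; that is what the `format_datetime` filter added in this PR wraps. A quick illustration, with the exact output strings approximate since they depend on the Babel/CLDR version:

```python
# Locale-aware date rendering with Babel; the relative "6 months ago" part
# would still be computed client-side in JavaScript.
import datetime

from babel.dates import format_date

release_date = datetime.date(2015, 4, 10)

print(format_date(release_date, locale="en_US"))  # e.g. "Apr 10, 2015"
print(format_date(release_date, locale="en_GB"))  # e.g. "10 Apr 2015"
print(format_date(release_date, locale="de_DE"))  # e.g. "10.04.2015"
```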
| 2015-11-22T17:00:32Z | [] | [] |
pypi/warehouse | 832 | pypi__warehouse-832 | [
"807"
] | c13f2106adf91d9889ae7bf11d06cc5f58ca6ebd | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -235,6 +235,7 @@ def configure(settings=None):
# We'll want to configure some filters for Jinja2 as well.
filters = config.get_settings().setdefault("jinja2.filters", {})
+ filters.setdefault("json", "warehouse.filters:tojson")
filters.setdefault("readme", "warehouse.filters:readme_renderer")
filters.setdefault("shorten_number", "warehouse.filters:shorten_number")
@@ -321,6 +322,7 @@ def configure(settings=None):
# Enable a Content Security Policy
config.add_settings({
"csp": {
+ "connect-src": ["'self'"],
"default-src": ["'none'"],
"font-src": ["'self'", "fonts.gstatic.com"],
"frame-ancestors": ["'none'"],
@@ -354,6 +356,9 @@ def configure(settings=None):
),
)
+ # Enable Warehouse to serve our locale files
+ config.add_static_view(name="locales", path="warehouse:locales/")
+
# Enable support of passing certain values like remote host, client
# address, and protocol support in from an outer proxy to the application.
config.add_wsgi_middleware(
diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -12,6 +12,7 @@
import binascii
import hmac
+import json
import urllib.parse
import html5lib
@@ -85,3 +86,7 @@ def shorten_number(value):
return "{:.3g}{}".format(magnitude, symbol)
return str(value)
+
+
+def tojson(value):
+ return json.dumps(value, sort_keys=True, separators=(",", ":"))
diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py
--- a/warehouse/i18n/__init__.py
+++ b/warehouse/i18n/__init__.py
@@ -10,24 +10,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os.path
-
from babel.core import Locale
-from babel.support import Translations
-
-from warehouse.i18n.translations import (
- JinjaRequestTranslation, translate_value, gettext, ngettext,
-)
-
-
-__all__ = ["gettext", "ngettext", "includeme"]
-
-
-GETTEXT_DOMAIN = "warehouse"
-
-LOCALE_DIR = os.path.abspath(
- os.path.join(os.path.dirname(__file__), "..", "translations")
-)
def _locale(request):
@@ -37,18 +20,9 @@ def _locale(request):
return Locale.parse(request.locale_name)
-def _translation(request):
- """
- Loads a translation object for this request.
- """
- # TODO: Should we cache these in memory?
- return Translations.load(LOCALE_DIR, request.locale, domain=GETTEXT_DOMAIN)
-
-
def includeme(config):
# Add the request attributes
config.add_request_method(_locale, name="locale", reify=True)
- config.add_request_method(_translation, name="translation", reify=True)
# Register our i18n/l10n filters for Jinja2
filters = config.get_settings().setdefault("jinja2.filters", {})
@@ -58,9 +32,6 @@ def includeme(config):
"warehouse.i18n.filters:format_datetime",
)
- # Register our finalize function for Jinja2
- config.get_settings()["jinja2.finalize"] = translate_value
-
- # Configure Jinja2 for translation
- config.get_settings()["jinja2.i18n.domain"] = GETTEXT_DOMAIN
- config.get_settings()["jinja2.i18n.gettext"] = JinjaRequestTranslation
+ # Register our utility functions with Jinja2
+ jglobals = config.get_settings().setdefault("jinja2.globals", {})
+ jglobals.setdefault("l20n", "warehouse.i18n.l20n:l20n")
diff --git a/warehouse/i18n/l20n.py b/warehouse/i18n/l20n.py
new file mode 100644
--- /dev/null
+++ b/warehouse/i18n/l20n.py
@@ -0,0 +1,29 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import jinja2
+
+from markupsafe import Markup as M # noqa
+
+from warehouse.filters import tojson
+
+
+_L20N_TEMPLATE = jinja2.Template(
+ 'data-l10n-id="{{ tid }}"'
+ '{% if data %} data-l10n-args="{{ data }}"{% endif %}',
+ autoescape=True,
+)
+
+
+def l20n(tid, **kwargs):
+ data = tojson(kwargs) if kwargs else None
+ return M(_L20N_TEMPLATE.render(tid=tid, data=data))
diff --git a/warehouse/i18n/translations.py b/warehouse/i18n/translations.py
deleted file mode 100644
--- a/warehouse/i18n/translations.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections.abc
-import functools
-
-from jinja2 import contextfunction
-from pyramid.threadlocal import get_current_request
-
-
-class TranslationString:
-
- def __init__(self, message_id, plural=None, n=None, mapping=None):
- if mapping is None:
- mapping = {}
-
- self.message_id = message_id
- self.plural = plural
- self.n = n
- self.mapping = mapping
-
- if bool(self.plural) != bool(self.n):
- raise ValueError("Must specify plural and n together.")
-
- def __repr__(self):
- extra = ""
- if self.plural is not None:
- extra = " plural={!r} n={!r}".format(self.plural, self.n)
- return "<TranslationString: message_id={!r}{}>".format(
- self.message_id,
- extra,
- )
-
- def __mod__(self, mapping):
- if not isinstance(mapping, collections.abc.Mapping):
- raise TypeError("Only mappings are supported.")
-
- vals = self.mapping.copy()
- vals.update(mapping)
-
- return TranslationString(
- self.message_id, self.plural, self.n, mapping=vals,
- )
-
- def translate(self, translation):
- if self.plural is not None:
- result = translation.ngettext(self.message_id, self.plural, self.n)
- else:
- result = translation.gettext(self.message_id)
-
- return result % self.mapping
-
-
-class JinjaRequestTranslation:
-
- def __init__(self, domain):
- self.domain = domain
-
- @contextfunction
- def gettext(self, ctx, *args, **kwargs):
- request = ctx.get("request") or get_current_request()
- return request.translation.gettext(*args, **kwargs)
-
- @contextfunction
- def ngettext(self, ctx, *args, **kwargs):
- request = ctx.get("request") or get_current_request()
- return request.translation.ngettext(*args, **kwargs)
-
-
-@contextfunction
-def translate_value(ctx, value):
- if isinstance(value, TranslationString):
- return value.translate(ctx["request"].translation)
-
- return value
-
-
-def gettext(message_id, **kwargs):
- return TranslationString(message_id, mapping=kwargs)
-
-
-def ngettext(message_id, plural, n=None, **kwargs):
- if n is None:
- return functools.partial(
- TranslationString, message_id, plural, mapping=kwargs
- )
-
- return TranslationString(message_id, plural, n, mapping=kwargs)
| diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py
--- a/tests/unit/i18n/test_init.py
+++ b/tests/unit/i18n/test_init.py
@@ -27,21 +27,6 @@ def test_sets_locale(monkeypatch):
assert locale_cls.parse.calls == [pretend.call(request.locale_name)]
-def test_loads_translations(monkeypatch):
- translation = pretend.stub()
- translations = pretend.stub(
- load=pretend.call_recorder(lambda d, l, domain: translation)
- )
- monkeypatch.setattr(i18n, "Translations", translations)
-
- request = pretend.stub(locale=pretend.stub())
-
- assert i18n._translation(request) is translation
- assert translations.load.calls == [
- pretend.call(i18n.LOCALE_DIR, request.locale, domain="warehouse"),
- ]
-
-
def test_includeme():
config_settings = {}
config = pretend.stub(
@@ -53,14 +38,13 @@ def test_includeme():
assert config.add_request_method.calls == [
pretend.call(i18n._locale, name="locale", reify=True),
- pretend.call(i18n._translation, name="translation", reify=True),
]
assert config_settings == {
"jinja2.filters": {
"format_date": "warehouse.i18n.filters:format_date",
"format_datetime": "warehouse.i18n.filters:format_datetime",
},
- "jinja2.finalize": i18n.translate_value,
- "jinja2.i18n.domain": "warehouse",
- "jinja2.i18n.gettext": i18n.JinjaRequestTranslation,
+ "jinja2.globals": {
+ "l20n": "warehouse.i18n.l20n:l20n",
+ },
}
diff --git a/tests/unit/i18n/test_l20n.py b/tests/unit/i18n/test_l20n.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/i18n/test_l20n.py
@@ -0,0 +1,31 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from warehouse.i18n import l20n
+
+
+@pytest.mark.parametrize(
+ ("tid", "args", "expected"),
+ [
+ ("foo", {}, 'data-l10n-id="foo"'),
+ (
+ "bar",
+ {"thing": "other"},
+ 'data-l10n-id="bar" '
+ 'data-l10n-args="{"thing":"other"}"',
+ ),
+ ],
+)
+def test_l20n(tid, args, expected):
+ assert l20n.l20n(tid, **args) == expected
diff --git a/tests/unit/i18n/test_translations.py b/tests/unit/i18n/test_translations.py
deleted file mode 100644
--- a/tests/unit/i18n/test_translations.py
+++ /dev/null
@@ -1,206 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pretend
-import pytest
-
-from warehouse.i18n import translations
-
-
-class TestTranslationString:
-
- def test_stores_values(self):
- message_id = pretend.stub()
- plural = pretend.stub()
- n = pretend.stub()
-
- ts = translations.TranslationString(message_id, plural, n)
-
- assert ts.message_id is message_id
- assert ts.plural is plural
- assert ts.n is n
-
- def test_cant_specify_only_plural(self):
- message_id = pretend.stub()
- plural = pretend.stub()
-
- with pytest.raises(ValueError):
- translations.TranslationString(message_id, plural)
-
- def test_cant_specify_only_n(self):
- message_id = pretend.stub()
- n = pretend.stub()
-
- with pytest.raises(ValueError):
- translations.TranslationString(message_id, n=n)
-
- @pytest.mark.parametrize(
- ("args", "expected"),
- [
- (
- ("A Message",),
- "<TranslationString: message_id={!r}>".format("A Message"),
- ),
- (
- ("A Message", "Messages", 3),
- "<TranslationString: message_id={!r} plural={!r} "
- "n={!r}>".format("A Message", "Messages", 3),
- ),
- ],
- )
- def test_repr(self, args, expected):
- ts = translations.TranslationString(*args)
- assert repr(ts) == expected
-
- def test_mod_errors_non_mapping(self):
- ts = translations.TranslationString("Name is %(name)s")
-
- with pytest.raises(TypeError):
- ts % (1,)
-
- def test_mod_adds_mapping_creates(self):
- ts1 = translations.TranslationString("Name is %(name)s")
- ts2 = ts1 % {"name": "MyName"}
- ts3 = ts2 % {"name": "AnotherName"}
-
- assert ts1.mapping == {}
- assert ts2.mapping == {"name": "MyName"}
- assert ts3.mapping == {"name": "AnotherName"}
-
- def test_translate_gettext(self):
- ts = translations.TranslationString("Test %(foo)s")
- ts = ts % {"foo": "bar"}
-
- translation = pretend.stub(
- gettext=pretend.call_recorder(lambda m: "Translated %(foo)s")
- )
-
- assert ts.translate(translation) == "Translated bar"
- assert translation.gettext.calls == [pretend.call("Test %(foo)s")]
-
- def test_translate_ngettext(self):
- ts = translations.TranslationString(
- "Test %(foo)s", "Plural %(foos)s", 1,
- )
- ts = ts % {"foo": "bar"}
-
- translation = pretend.stub(
- ngettext=pretend.call_recorder(
- lambda m, p, n: "Translated %(foo)s"
- ),
- )
-
- assert ts.translate(translation) == "Translated bar"
- assert translation.ngettext.calls == [
- pretend.call("Test %(foo)s", "Plural %(foos)s", 1),
- ]
-
-
-class TestJinjaRequestTranslation:
-
- def test_stores_domain(self):
- domain = pretend.stub()
- assert translations.JinjaRequestTranslation(domain).domain is domain
-
- def test_calls_translation_gettext(self):
- gettext = pretend.call_recorder(lambda m: "A translated message")
-
- context = {
- "request": pretend.stub(translation=pretend.stub(gettext=gettext)),
- }
-
- rt = translations.JinjaRequestTranslation(pretend.stub())
- translated = rt.gettext(context, "A testing message")
-
- assert translated == "A translated message"
- assert gettext.calls == [pretend.call("A testing message")]
-
- def test_calls_translation_ngettext(self):
- ngettext = pretend.call_recorder(lambda m, p, n: "translated message")
-
- context = {
- "request": pretend.stub(
- translation=pretend.stub(ngettext=ngettext),
- ),
- }
-
- rt = translations.JinjaRequestTranslation(pretend.stub())
- translated = rt.ngettext(
- context, "A testing message", "Another testing message", 4,
- )
-
- assert translated == "translated message"
- assert ngettext.calls == [
- pretend.call("A testing message", "Another testing message", 4),
- ]
-
-
-class TestTranslateValue:
-
- def test_with_non_translate_string(self):
- value = pretend.stub()
- assert translations.translate_value(None, value) is value
-
- def test_with_translate_string(self):
- translation = pretend.stub()
- context = {"request": pretend.stub(translation=translation)}
- ts = translations.TranslationString("A Message")
- ts.translate = pretend.call_recorder(lambda t: "translated message")
-
- translated = translations.translate_value(context, ts)
-
- assert translated == "translated message"
- assert ts.translate.calls == [pretend.call(translation)]
-
-
-class TestSimpleAPI:
-
- def test_gettext_no_kwargs(self):
- ts = translations.gettext("My Message")
- assert isinstance(ts, translations.TranslationString)
- assert ts.message_id == "My Message"
- assert ts.plural is None
- assert ts.n is None
- assert ts.mapping == {}
-
- def test_gettext_with_kwargs(self):
- ts = translations.gettext("My Message", foo="bar")
- assert isinstance(ts, translations.TranslationString)
- assert ts.message_id == "My Message"
- assert ts.plural is None
- assert ts.n is None
- assert ts.mapping == {"foo": "bar"}
-
- def test_ngettext_no_n(self):
- ts_p = translations.ngettext("M1", "M2")
- ts = ts_p(3)
- assert isinstance(ts, translations.TranslationString)
- assert ts.message_id == "M1"
- assert ts.plural == "M2"
- assert ts.n == 3
- assert ts.mapping == {}
-
- def test_ngettext_with_n(self):
- ts = translations.ngettext("M1", "M2", 6)
- assert isinstance(ts, translations.TranslationString)
- assert ts.message_id == "M1"
- assert ts.plural == "M2"
- assert ts.n == 6
- assert ts.mapping == {}
-
- def test_ngettext_with_kwargs(self):
- ts = translations.ngettext("M1", "M2", 6, foo="bar")
- assert isinstance(ts, translations.TranslationString)
- assert ts.message_id == "M1"
- assert ts.plural == "M2"
- assert ts.n == 6
- assert ts.mapping == {"foo": "bar"}
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -274,9 +274,7 @@ def __init__(self):
lambda d: configurator_settings.update(d)
),
add_tween=pretend.call_recorder(lambda tween_factory: None),
- add_static_view=pretend.call_recorder(
- lambda name, path, cache_max_age, cachebust: None
- ),
+ add_static_view=pretend.call_recorder(lambda name, path, **kw: None),
scan=pretend.call_recorder(lambda ignore: None),
)
configurator_cls = pretend.call_recorder(lambda settings: configurator_obj)
@@ -401,6 +399,7 @@ def __init__(self):
}),
pretend.call({
"csp": {
+ "connect-src": ["'self'"],
"default-src": ["'none'"],
"font-src": ["'self'", "fonts.gstatic.com"],
"frame-ancestors": ["'none'"],
@@ -437,6 +436,7 @@ def __init__(self):
cache_max_age=0,
cachebust=cachebuster_obj,
),
+ pretend.call(name="locales", path="warehouse:locales/"),
]
assert cachebuster_cls.calls == [
pretend.call("warehouse:static/dist/manifest.json", reload=False),
diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -143,3 +143,13 @@ def test_renders_camo_no_src(self, monkeypatch):
)
def test_shorten_number(inp, expected):
assert filters.shorten_number(inp) == expected
+
+
+@pytest.mark.parametrize(
+ ("inp", "expected"),
+ [
+ ({"foo": "bar", "left": "right"}, '{"foo":"bar","left":"right"}'),
+ ],
+)
+def test_tojson(inp, expected):
+ assert filters.tojson(inp) == expected
| Change Logo alt text
From our accessibility audit by @xavierdutreilh:
The PyPI logo has an alt attribute that I would simplify. Instead of "PyPI logo", I would use just "PyPI", because that is enough and because any screen reader would read it as "IMAGE PyPI" (having "logo" read out as well would be redundant).
| 2015-11-26T22:51:12Z | [] | [] |
|
pypi/warehouse | 839 | pypi__warehouse-839 | [
"769"
] | b77db35059b4bbe5a22a0e51e602e0d043fd80ca | diff --git a/warehouse/cache/http.py b/warehouse/cache/http.py
--- a/warehouse/cache/http.py
+++ b/warehouse/cache/http.py
@@ -13,7 +13,8 @@
import collections.abc
import functools
-from pyramid.tweens import EXCVIEW
+
+BUFFER_MAX = 1 * 1024 * 1024 # We'll buffer up to 1MB
def add_vary_callback(*varies):
@@ -72,27 +73,36 @@ def conditional_http_tween(request):
if response.last_modified is not None:
response.conditional_response = True
+ streaming = not isinstance(response.app_iter, collections.abc.Sequence)
+
# We want to only enable the conditional machinery if either we
# were given an explicit ETag header by the view or we have a
# buffered response and can generate the ETag header ourself.
if response.etag is not None:
response.conditional_response = True
- elif (isinstance(response.app_iter, collections.abc.Sequence) and
- len(response.app_iter) == 1):
- # We can only reasonably implement automatic ETags on 200 responses
- # to GET or HEAD requests. The subtles of doing it in other cases
- # are too hard to get right.
- if (request.method in {"GET", "HEAD"} and
- response.status_code == 200):
+ # We can only reasonably implement automatic ETags on 200 responses
+ # to GET or HEAD requests. The subtles of doing it in other cases
+ # are too hard to get right.
+ elif request.method in {"GET", "HEAD"} and response.status_code == 200:
+ # If we have a streaming response, but it's small enough, we'll
+ # just go ahead and buffer it in memory so that we can generate a
+ # ETag for it.
+ if (streaming and response.content_length is not None
+ and response.content_length <= BUFFER_MAX):
+ response.body
+ streaming = False
+
+ # Anything that has survived as a streaming response at this point
+ # and doesn't have an ETag header already, we'll go ahead and give
+ # it one.
+ if not streaming:
response.conditional_response = True
response.md5_etag()
return response
+
return conditional_http_tween
def includeme(config):
- config.add_tween(
- "warehouse.cache.http.conditional_http_tween_factory",
- under=EXCVIEW,
- )
+ config.add_tween("warehouse.cache.http.conditional_http_tween_factory")
diff --git a/warehouse/cache/origin/__init__.py b/warehouse/cache/origin/__init__.py
--- a/warehouse/cache/origin/__init__.py
+++ b/warehouse/cache/origin/__init__.py
@@ -34,6 +34,7 @@ def esi_include(ctx, path, cookies=False):
cacher = request.find_service(IOriginCache)
except ValueError:
subreq = Request.blank(path)
+ subreq.accept_encoding = "identity"
if cookies:
subreq.cookies.update(request.cookies)
request.add_response_callback(add_vary_callback("Cookie"))
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -22,6 +22,7 @@
from pyramid.config import Configurator as _Configurator
from pyramid.response import Response
from pyramid.static import ManifestCacheBuster
+from pyramid.tweens import EXCVIEW
from pyramid_rpc.xmlrpc import XMLRPCRenderer
from warehouse import __commit__
@@ -344,6 +345,17 @@ def configure(settings=None):
# sent via POST.
config.add_tween("warehouse.config.require_https_tween_factory")
+ # Enable compression of our HTTP responses
+ config.add_tween(
+ "warehouse.utils.compression.compression_tween_factory",
+ over=[
+ "warehouse.cache.http.conditional_http_tween_factory",
+ "pyramid_debugtoolbar.toolbar_tween_factory",
+ "warehouse.raven.raven_tween_factory",
+ EXCVIEW,
+ ],
+ )
+
# Enable Warehouse to service our static files
config.add_static_view(
name="static",
diff --git a/warehouse/raven.py b/warehouse/raven.py
--- a/warehouse/raven.py
+++ b/warehouse/raven.py
@@ -13,7 +13,7 @@
import raven
import raven.middleware
-from pyramid.tweens import EXCVIEW
+from pyramid.tweens import EXCVIEW, INGRESS
from raven.utils.serializer.base import Serializer
from raven.utils.serializer.manager import manager as serialization_manager
@@ -65,7 +65,14 @@ def includeme(config):
config.add_request_method(_raven, name="raven", reify=True)
# Add a tween that will handle catching any exceptions that get raised.
- config.add_tween("warehouse.raven.raven_tween_factory", over=EXCVIEW)
+ config.add_tween(
+ "warehouse.raven.raven_tween_factory",
+ under=[
+ "pyramid_debugtoolbar.toolbar_tween_factory",
+ INGRESS,
+ ],
+ over=EXCVIEW,
+ )
# Wrap the WSGI object with the middle to catch any exceptions we don't
# catch elsewhere.
diff --git a/warehouse/utils/compression.py b/warehouse/utils/compression.py
new file mode 100644
--- /dev/null
+++ b/warehouse/utils/compression.py
@@ -0,0 +1,103 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import hashlib
+
+from collections.abc import Sequence
+
+
+ENCODINGS = ["identity", "gzip"]
+DEFAULT_ENCODING = "identity"
+BUFFER_MAX = 1 * 1024 * 1024 # We'll buffer up to 1MB
+
+
+def _compressor(request, response):
+ # Skip items with a Vary: Cookie/Authorization Header because we don't know
+ # if they are safe from the CRIME attack.
+ if (response.vary is not None
+ and (set(response.vary) & {"Cookie", "Authorization"})):
+ return
+
+ # Avoid compression if we've already got a Content-Encoding.
+ if "Content-Encoding" in response.headers:
+ return
+
+ # Ensure that the Accept-Encoding header gets added to the response.
+ vary = set(response.vary if response.vary is not None else [])
+ vary.add("Accept-Encoding")
+ response.vary = vary
+
+ # Negotiate the correct encoding from our request.
+ target_encoding = request.accept_encoding.best_match(
+ ENCODINGS,
+ default_match=DEFAULT_ENCODING,
+ )
+
+ # If we have a Sequence, we'll assume that we aren't streaming the
+ # response because it's probably a list or similar.
+ streaming = not isinstance(response.app_iter, Sequence)
+
+ # If our streaming content is small enough to easily buffer in memory
+ # then we'll just convert it to a non streaming response.
+ if (streaming and response.content_length is not None
+ and response.content_length <= BUFFER_MAX):
+ response.body
+ streaming = False
+
+ if streaming:
+ response.encode_content(encoding=target_encoding, lazy=True)
+
+ # We need to remove the content_length from this response, since
+ # we no longer know what the length of the content will be.
+ response.content_length = None
+
+ # If this has a streaming response, then we need to adjust the ETag
+ # header, if it has one, so that it reflects this. We don't just append
+ # ;gzip to this because we don't want people to try and use it to infer
+ # any information about it.
+ if response.etag is not None:
+ md5_digest = hashlib.md5((response.etag + ";gzip").encode("utf8"))
+ md5_digest = md5_digest.digest()
+ md5_digest = base64.b64encode(md5_digest)
+ md5_digest = md5_digest.replace(b"\n", b"").decode("utf8")
+ response.etag = md5_digest.strip("=")
+ else:
+ original_length = len(response.body)
+ response.encode_content(encoding=target_encoding, lazy=False)
+
+ # If the original length is less than our new, compressed length
+ # then we'll go back to the original. There is no reason to encode
+ # the content if it increases the length of the body.
+ if original_length < len(response.body):
+ response.decode_content()
+
+ # If we've added an encoding to the content, then we'll want to
+ # recompute the ETag.
+ if response.content_encoding is not None:
+ response.md5_etag()
+
+
+def compression_tween_factory(handler, registry):
+
+ def compression_tween(request):
+ response = handler(request)
+
+ # We use a response callback here so that it happens after all of the
+ # other response callbacks are called. This is important because
+ # otherwise we won't be able to check Vary headers and such that are
+ # set by response callbacks.
+ request.add_response_callback(_compressor)
+
+ return response
+
+ return compression_tween
| diff --git a/tests/functional/test_caching.py b/tests/functional/test_caching.py
--- a/tests/functional/test_caching.py
+++ b/tests/functional/test_caching.py
@@ -16,4 +16,4 @@
@pytest.mark.parametrize("path", ["/"])
def test_basic_views_dont_vary(webtest, path):
resp = webtest.get(path)
- assert "Vary" not in resp.headers
+ assert resp.headers["Vary"] == "Accept-Encoding"
diff --git a/tests/unit/cache/test_http.py b/tests/unit/cache/test_http.py
--- a/tests/unit/cache/test_http.py
+++ b/tests/unit/cache/test_http.py
@@ -13,8 +13,6 @@
import pretend
import pytest
-from pyramid.tweens import EXCVIEW
-
from warehouse.cache.http import (
add_vary, cache_control, conditional_http_tween_factory, includeme,
)
@@ -137,12 +135,14 @@ class TestConditionalHTTPTween:
def test_has_last_modified(self):
response = pretend.stub(
last_modified=pretend.stub(),
+ status_code=200,
etag=None,
conditional_response=False,
app_iter=iter([b"foo"]),
+ content_length=None,
)
handler = pretend.call_recorder(lambda request: response)
- request = pretend.stub()
+ request = pretend.stub(method="GET")
tween = conditional_http_tween_factory(handler, pretend.stub())
@@ -155,6 +155,7 @@ def test_explicit_etag(self):
last_modified=None,
etag="foo",
conditional_response=False,
+ app_iter=iter([b"foo"]),
)
handler = pretend.call_recorder(lambda request: response)
request = pretend.stub()
@@ -185,6 +186,28 @@ def test_implicit_etag(self, method):
assert response.conditional_response
assert response.md5_etag.calls == [pretend.call()]
+ @pytest.mark.parametrize("method", ["GET", "HEAD"])
+ def test_implicit_etag_buffers_streaming(self, method):
+ response = pretend.stub(
+ last_modified=None,
+ etag=None,
+ conditional_response=False,
+ md5_etag=pretend.call_recorder(lambda: None),
+ app_iter=iter([b"foo"]),
+ body=b"foo",
+ content_length=3,
+ status_code=200,
+ )
+ handler = pretend.call_recorder(lambda request: response)
+ request = pretend.stub(method=method)
+
+ tween = conditional_http_tween_factory(handler, pretend.stub())
+
+ assert tween(request) is response
+ assert handler.calls == [pretend.call(request)]
+ assert response.conditional_response
+ assert response.md5_etag.calls == [pretend.call()]
+
@pytest.mark.parametrize("method", ["GET", "HEAD"])
def test_no_implicit_etag_no_200(self, method):
response = pretend.stub(
@@ -227,13 +250,15 @@ def test_no_implicit_etag_wrong_method(self, method):
def test_no_etag(self):
response = pretend.stub(
+ status_code=200,
last_modified=None,
etag=None,
conditional_response=False,
app_iter=iter([b"foo"]),
+ content_length=None,
)
handler = pretend.call_recorder(lambda request: response)
- request = pretend.stub()
+ request = pretend.stub(method="GET")
tween = conditional_http_tween_factory(handler, pretend.stub())
@@ -244,13 +269,10 @@ def test_no_etag(self):
def test_includeme():
config = pretend.stub(
- add_tween=pretend.call_recorder(lambda t, under: None),
+ add_tween=pretend.call_recorder(lambda t: None),
)
includeme(config)
assert config.add_tween.calls == [
- pretend.call(
- "warehouse.cache.http.conditional_http_tween_factory",
- under=EXCVIEW,
- ),
+ pretend.call("warehouse.cache.http.conditional_http_tween_factory"),
]
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -17,6 +17,7 @@
import zope.interface
from pyramid import renderers
+from pyramid.tweens import EXCVIEW
from warehouse import config
from warehouse.utils.wsgi import ProxyFixer, VhmRootRemover
@@ -273,7 +274,7 @@ def __init__(self):
add_settings=pretend.call_recorder(
lambda d: configurator_settings.update(d)
),
- add_tween=pretend.call_recorder(lambda tween_factory: None),
+ add_tween=pretend.call_recorder(lambda tween_factory, **kw: None),
add_static_view=pretend.call_recorder(lambda name, path, **kw: None),
scan=pretend.call_recorder(lambda ignore: None),
)
@@ -428,6 +429,15 @@ def __init__(self):
assert configurator_obj.add_tween.calls == [
pretend.call("warehouse.config.content_security_policy_tween_factory"),
pretend.call("warehouse.config.require_https_tween_factory"),
+ pretend.call(
+ "warehouse.utils.compression.compression_tween_factory",
+ over=[
+ "warehouse.cache.http.conditional_http_tween_factory",
+ "pyramid_debugtoolbar.toolbar_tween_factory",
+ "warehouse.raven.raven_tween_factory",
+ EXCVIEW,
+ ],
+ ),
]
assert configurator_obj.add_static_view.calls == [
pretend.call(
diff --git a/tests/unit/test_raven.py b/tests/unit/test_raven.py
--- a/tests/unit/test_raven.py
+++ b/tests/unit/test_raven.py
@@ -14,7 +14,7 @@
import pytest
import raven as real_raven
-from pyramid.tweens import EXCVIEW
+from pyramid.tweens import EXCVIEW, INGRESS
from raven.middleware import Sentry as SentryMiddleware
from unittest import mock
@@ -125,7 +125,14 @@ def __init__(self):
pretend.call(raven._raven, name="raven", reify=True),
]
assert config.add_tween.calls == [
- pretend.call("warehouse.raven.raven_tween_factory", over=EXCVIEW),
+ pretend.call(
+ "warehouse.raven.raven_tween_factory",
+ over=EXCVIEW,
+ under=[
+ "pyramid_debugtoolbar.toolbar_tween_factory",
+ INGRESS,
+ ],
+ ),
]
assert config.add_wsgi_middleware.calls == [
pretend.call(SentryMiddleware, client=client_obj),
diff --git a/tests/unit/utils/test_compression.py b/tests/unit/utils/test_compression.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/utils/test_compression.py
@@ -0,0 +1,151 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+import pytest
+
+from pyramid.response import Response
+from webob.acceptparse import Accept, NoAccept
+from webob.response import gzip_app_iter
+
+from warehouse.utils.compression import _compressor as compressor
+from warehouse.utils.compression import compression_tween_factory
+
+
+class TestCompressor:
+
+ @pytest.mark.parametrize(
+ "vary",
+ [
+ ["Cookie"],
+ ["Authorization"],
+ ["Cookie", "Authorization"],
+ ],
+ )
+ def test_bails_if_vary(self, vary):
+ request = pretend.stub()
+ response = pretend.stub(vary=vary)
+
+ compressor(request, response)
+
+ def test_bails_if_content_encoding(self):
+ request = pretend.stub()
+ response = pretend.stub(
+ headers={"Content-Encoding": "something"},
+ vary=None,
+ )
+
+ compressor(request, response)
+
+ @pytest.mark.parametrize(
+ ("vary", "expected"),
+ [
+ (None, {"Accept-Encoding"}),
+ (["Something-Else"], {"Accept-Encoding", "Something-Else"}),
+ ],
+ )
+ def test_sets_vary(self, vary, expected):
+ request = pretend.stub(accept_encoding=NoAccept())
+ response = Response(body=b"foo")
+ response.vary = vary
+
+ compressor(request, response)
+
+ assert set(response.vary) == expected
+
+ def test_compresses_non_streaming(self):
+ decompressed_body = b"foofoofoofoofoofoofoofoofoofoofoofoofoofoo"
+ compressed_body = b"".join(list(gzip_app_iter([decompressed_body])))
+
+ request = pretend.stub(accept_encoding=Accept("gzip"))
+ response = Response(body=decompressed_body)
+ response.md5_etag()
+
+ original_etag = response.etag
+
+ compressor(request, response)
+
+ assert response.content_encoding == "gzip"
+ assert response.content_length == len(compressed_body)
+ assert response.body == compressed_body
+ assert response.etag != original_etag
+
+ def test_compresses_streaming(self):
+ decompressed_body = b"foofoofoofoofoofoofoofoofoofoofoofoofoofoo"
+ compressed_body = b"".join(list(gzip_app_iter([decompressed_body])))
+
+ request = pretend.stub(accept_encoding=Accept("gzip"))
+ response = Response(app_iter=iter([decompressed_body]))
+
+ compressor(request, response)
+
+ assert response.content_encoding == "gzip"
+ assert response.content_length is None
+ assert response.body == compressed_body
+
+ def test_compresses_streaming_with_etag(self):
+ decompressed_body = b"foofoofoofoofoofoofoofoofoofoofoofoofoofoo"
+ compressed_body = b"".join(list(gzip_app_iter([decompressed_body])))
+
+ request = pretend.stub(accept_encoding=Accept("gzip"))
+ response = Response(app_iter=iter([decompressed_body]))
+ response.etag = "foo"
+
+ compressor(request, response)
+
+ assert response.content_encoding == "gzip"
+ assert response.content_length is None
+ assert response.body == compressed_body
+ assert response.etag == "rfbezwKUdGjz6VPWDLDTvA"
+
+ def test_buffers_small_streaming(self):
+ decompressed_body = b"foofoofoofoofoofoofoofoofoofoofoofoofoofoo"
+ compressed_body = b"".join(list(gzip_app_iter([decompressed_body])))
+
+ request = pretend.stub(accept_encoding=Accept("gzip"))
+ response = Response(
+ app_iter=iter([decompressed_body]),
+ content_length=len(decompressed_body),
+ )
+
+ compressor(request, response)
+
+ assert response.content_encoding == "gzip"
+ assert response.content_length == len(compressed_body)
+ assert response.body == compressed_body
+
+ def test_doesnt_compress_too_small(self):
+ request = pretend.stub(accept_encoding=Accept("gzip"))
+ response = Response(body=b"foo")
+
+ compressor(request, response)
+
+ assert response.content_encoding is None
+ assert response.content_length == 3
+ assert response.body == b"foo"
+
+
+def test_compression_tween_factory():
+ callbacks = []
+
+ registry = pretend.stub()
+ request = pretend.stub(add_response_callback=callbacks.append)
+ response = pretend.stub()
+
+ def handler(inner_request):
+ assert inner_request is request
+ return response
+
+ tween = compression_tween_factory(handler, registry)
+
+ assert tween(request) is response
+ assert callbacks == [compressor]
| Compress any HTTP responses which are safe to do so
We should add a small piece of middleware (or similar) that automatically compresses HTTP responses, when possible, if they do not have `Vary: Cookie` or `Vary: Authorization` headers. The absence of those headers should be a good indicator that the current page does not contain any secret information and thus shouldn't be a target for a CRIME attack.
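A rough sketch of the idea, written as a webob/Pyramid-style response callback. The function and constant names are illustrative only (not a final implementation), the Accept-Encoding check is deliberately naive, and the response body is assumed to already be buffered:

```python
import gzip

UNSAFE_VARY = {"Cookie", "Authorization"}  # treat these as "may contain secrets"


def _compress_if_safe(request, response):
    vary = set(response.vary or [])
    # Skip anything that varies on credentials so we don't create a
    # CRIME-style compression oracle for secret data.
    if vary & UNSAFE_VARY:
        return
    # Naive client-support check, just for the sketch.
    if "gzip" not in str(request.accept_encoding or ""):
        return
    vary.add("Accept-Encoding")
    response.vary = vary
    compressed = gzip.compress(response.body)
    # Only keep the compressed body if it is actually smaller.
    if len(compressed) < len(response.body):
        response.body = compressed
        response.content_encoding = "gzip"
```

Something like `request.add_response_callback(_compress_if_safe)` from a tween could hook this in per request.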
| 2015-11-27T18:58:10Z | [] | [] |
|
pypi/warehouse | 863 | pypi__warehouse-863 | [
"601"
] | 3b3bdef0f7a9bbbcdef7346d5f2c04e9cc4c725d | diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -15,6 +15,7 @@
import os.path
import re
import tempfile
+import zipfile
import packaging.specifiers
import packaging.version
@@ -35,7 +36,9 @@
from warehouse.packaging.interfaces import IFileStorage
from warehouse.packaging.models import (
Project, Release, Dependency, DependencyKind, Role, File, Filename,
+ JournalEntry,
)
+from warehouse.sessions import uses_session
from warehouse.utils.http import require_POST
@@ -415,13 +418,85 @@ def full_validate(self):
)
-# TODO: Uncomment the below code once the upload view is safe to be used on
-# warehouse.python.org. For now, we'll disable it so people can't use
-# Warehouse to upload and get broken or not properly validated data.
-# @view_config(
-# route_name="legacy.api.pypi.file_upload",
-# decorator=[require_POST, csrf_exempt, uses_session],
-# )
+_safe_zipnames = re.compile(r"(purelib|platlib|headers|scripts|data).+", re.I)
+
+
+def _is_valid_dist_file(filename, filetype):
+ """
+ Perform some basic checks to see whether the indicated file could be
+ a valid distribution file.
+ """
+
+ if filename.endswith(".exe"):
+ # The only valid filetype for a .exe file is "bdist_wininst".
+ if filetype != "bdist_wininst":
+ return False
+
+ # Ensure that the .exe is a valid zip file, and that all of the files
+ # contained within it have safe filenames.
+ try:
+ with zipfile.ZipFile(filename, "r") as zfp:
+ # We need the no branch below to work around a bug in
+ # coverage.py where it's detecting a missed branch where there
+ # isn't one.
+ for zipname in zfp.namelist(): # pragma: no branch
+ if not _safe_zipnames.match(zipname):
+ return False
+ except zipfile.BadZipFile:
+ return False
+ elif filename.endswith(".msi"):
+ # The only valid filetype for a .msi is "bdist_msi"
+ if filetype != "bdist_msi":
+ return False
+
+ # Check the first 8 bytes of the MSI file. This was taken from the
+ # legacy implementation of PyPI which itself took it from the
+ # implementation of `file` I believe.
+ with open(filename, "rb") as fp:
+ if fp.read(8) != b"\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1":
+ return False
+ elif filename.endswith(".zip") or filename.endswith(".egg"):
+ # Ensure that the .zip/.egg is a valid zip file, and that it has a
+ # PKG-INFO file.
+ try:
+ with zipfile.ZipFile(filename, "r") as zfp:
+ for zipname in zfp.namelist():
+ parts = os.path.split(zipname)
+ if len(parts) == 2 and parts[1] == "PKG-INFO":
+ # We need the no branch below to work around a bug in
+ # coverage.py where it's detecting a missed branch
+ # where there isn't one.
+ break # pragma: no branch
+ else:
+ return False
+ except zipfile.BadZipFile:
+ return False
+ elif filename.endswith(".whl"):
+ # Ensure that the .whl is a valid zip file, and that it has a WHEEL
+ # file.
+ try:
+ with zipfile.ZipFile(filename, "r") as zfp:
+ for zipname in zfp.namelist():
+ parts = os.path.split(zipname)
+ if len(parts) == 2 and parts[1] == "WHEEL":
+ # We need the no branch below to work around a bug in
+ # coverage.py where it's detecting a missed branch
+ # where there isn't one.
+ break # pragma: no branch
+ else:
+ return False
+ except zipfile.BadZipFile:
+ return False
+
+ # If we haven't yet decided it's not valid, then we'll assume it is and
+ # allow it.
+ return True
+
+
+@view_config(
+ route_name="legacy.api.pypi.file_upload",
+ decorator=[require_POST, csrf_exempt, uses_session],
+)
def file_upload(request):
# Before we do anything, if there isn't an authenticated user with this
# request, then we'll go ahead and bomb out.
@@ -499,6 +574,25 @@ def file_upload(request):
request.db.add(
Role(user=request.user, project=project, role_name="Owner")
)
+ # TODO: This should be handled by some sort of database trigger or a
+ # SQLAlchemy hook or the like instead of doing it inline in this
+ # view.
+ request.db.add(
+ JournalEntry(
+ name=project.name,
+ action="create",
+ submitted_by=request.user,
+ submitted_from=request.client_addr,
+ ),
+ )
+ request.db.add(
+ JournalEntry(
+ name=project.name,
+ action="add Owner {}".format(request.user.username),
+ submitted_by=request.user,
+ submitted_from=request.client_addr,
+ ),
+ )
# Check that the user has permission to do things to this project, if this
# is a new project this will act as a sanity check for the role we just
@@ -551,6 +645,18 @@ def file_upload(request):
}
)
request.db.add(release)
+ # TODO: This should be handled by some sort of database trigger or a
+ # SQLAlchemy hook or the like instead of doing it inline in this
+ # view.
+ request.db.add(
+ JournalEntry(
+ name=release.project.name,
+ version=release.version,
+ action="new release",
+ submitted_by=request.user,
+ submitted_from=request.client_addr,
+ ),
+ )
# TODO: We need a better solution to this than to just do it inline inside
# this method. Ideally the version field would just be sortable, but
@@ -590,6 +696,11 @@ def file_upload(request):
)
)
+ # Check the content type of what is being uploaded
+ if (not request.POST["content"].type
+ or request.POST["content"].type.startswith("image/")):
+ raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")
+
# Check to see if the file that was uploaded exists already or not.
if request.db.query(
request.db.query(File)
@@ -614,9 +725,11 @@ def file_upload(request):
file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))
with tempfile.TemporaryDirectory() as tmpdir:
+ temporary_filename = os.path.join(tmpdir, filename)
+
# Buffer the entire file onto disk, checking the hash of the file as we
# go along.
- with open(os.path.join(tmpdir, filename), "wb") as fp:
+ with open(temporary_filename, "wb") as fp:
file_size = 0
file_hash = hashlib.md5()
for chunk in iter(
@@ -639,7 +752,12 @@ def file_upload(request):
"from the uploaded file."
)
- # TODO: Check the file to make sure it is a valid distribution file.
+ # Check the file to make sure it is a valid distribution file.
+ if not _is_valid_dist_file(temporary_filename, form.filetype.data):
+ raise _exc_with_message(
+ HTTPBadRequest,
+ "Invalid distribution file.",
+ )
# Check that if it's a binary wheel, it's on a supported platform
if filename.endswith(".whl"):
@@ -677,9 +795,9 @@ def file_upload(request):
else:
has_signature = False
- # TODO: We need some sort of trigger that will automatically add
- # filenames to Filename instead of relying on this code running
- # inside of our upload API.
+ # TODO: This should be handled by some sort of database trigger or a
+ # SQLAlchemy hook or the like instead of doing it inline in this
+ # view.
request.db.add(Filename(filename=filename))
# Store the information about the file in the database.
@@ -695,6 +813,22 @@ def file_upload(request):
)
request.db.add(file_)
+ # TODO: This should be handled by some sort of database trigger or a
+ # SQLAlchemy hook or the like instead of doing it inline in this
+ # view.
+ request.db.add(
+ JournalEntry(
+ name=release.project.name,
+ version=release.version,
+ action="add {python_version} file {filename}".format(
+ python_version=file_.python_version,
+ filename=file_.filename,
+ ),
+ submitted_by=request.user,
+ submitted_from=request.client_addr,
+ ),
+ )
+
# TODO: We need a better answer about how to make this transactional so
# this won't take affect until after a commit has happened, for
# now we'll just ignore it and save it before the transaction is
diff --git a/warehouse/migrations/versions/477bc785c999_add_a_server_default_for_submitted_date.py b/warehouse/migrations/versions/477bc785c999_add_a_server_default_for_submitted_date.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/477bc785c999_add_a_server_default_for_submitted_date.py
@@ -0,0 +1,44 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Add a server default for submitted_date
+
+Revision ID: 477bc785c999
+Revises: 6a03266b2d
+Create Date: 2015-12-16 16:19:59.419186
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+revision = "477bc785c999"
+down_revision = "6a03266b2d"
+
+
+def upgrade():
+ op.alter_column(
+ "journals",
+ "submitted_date",
+ server_default=sa.func.now(),
+ nullable=False,
+ )
+
+
+def downgrade():
+ op.alter_column(
+ "journals",
+ "submitted_date",
+ existing_type=postgresql.TIMESTAMP(),
+ server_default=None,
+ nullable=True,
+ )
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -438,7 +438,11 @@ def __table_args__(cls): # noqa
name = Column(Text)
version = Column(Text)
action = Column(Text)
- submitted_date = Column(DateTime(timezone=False))
+ submitted_date = Column(
+ DateTime(timezone=False),
+ nullable=False,
+ server_default=sql.func.now(),
+ )
_submitted_by = Column(
"submitted_by",
CIText,
| diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -13,6 +13,7 @@
import io
import os.path
import tempfile
+import zipfile
from unittest import mock
@@ -29,6 +30,7 @@
from warehouse.packaging.interfaces import IFileStorage
from warehouse.packaging.models import (
File, Filename, Dependency, DependencyKind, Release, Project, Role,
+ JournalEntry,
)
from ....common.db.accounts import UserFactory
@@ -315,6 +317,122 @@ def test_full_validate_invalid(self, data):
form.full_validate()
+class TestFileValidation:
+
+ def test_defaults_to_true(self):
+ assert pypi._is_valid_dist_file("", "")
+
+ @pytest.mark.parametrize(
+ ("filename", "filetype"),
+ [
+ ("test.exe", "bdist_msi"),
+ ("test.msi", "bdist_wininst"),
+ ],
+ )
+ def test_bails_with_invalid_package_type(self, filename, filetype):
+ assert not pypi._is_valid_dist_file(filename, filetype)
+
+ @pytest.mark.parametrize(
+ ("filename", "filetype"),
+ [
+ ("test.exe", "bdist_wininst"),
+ ("test.zip", "sdist"),
+ ("test.egg", "bdist_egg"),
+ ("test.whl", "bdist_wheel"),
+ ],
+ )
+ def test_bails_with_invalid_zipfile(self, tmpdir, filename, filetype):
+ f = str(tmpdir.join(filename))
+
+ with open(f, "wb") as fp:
+ fp.write(b"this is not a valid zip file")
+
+ assert not pypi._is_valid_dist_file(f, filetype)
+
+ def test_wininst_unsafe_filename(self, tmpdir):
+ f = str(tmpdir.join("test.exe"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something/bar.py", b"the test file")
+
+ assert not pypi._is_valid_dist_file(f, "bdist_wininst")
+
+ def test_wininst_safe_filename(self, tmpdir):
+ f = str(tmpdir.join("test.exe"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("purelib/bar.py", b"the test file")
+
+ assert pypi._is_valid_dist_file(f, "bdist_wininst")
+
+ def test_msi_invalid_header(self, tmpdir):
+ f = str(tmpdir.join("test.msi"))
+
+ with open(f, "wb") as fp:
+ fp.write(b"this is not the correct header for an msi")
+
+ assert not pypi._is_valid_dist_file(f, "bdist_msi")
+
+ def test_msi_valid_header(self, tmpdir):
+ f = str(tmpdir.join("test.msi"))
+
+ with open(f, "wb") as fp:
+ fp.write(b"\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1")
+
+ assert pypi._is_valid_dist_file(f, "bdist_msi")
+
+ def test_zip_no_pkg_info(self, tmpdir):
+ f = str(tmpdir.join("test.zip"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+
+ assert not pypi._is_valid_dist_file(f, "sdist")
+
+ def test_zip_has_pkg_info(self, tmpdir):
+ f = str(tmpdir.join("test.zip"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+ zfp.writestr("PKG-INFO", b"this is the package info")
+
+ assert pypi._is_valid_dist_file(f, "sdist")
+
+ def test_egg_no_pkg_info(self, tmpdir):
+ f = str(tmpdir.join("test.egg"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+
+ assert not pypi._is_valid_dist_file(f, "bdist_egg")
+
+ def test_egg_has_pkg_info(self, tmpdir):
+ f = str(tmpdir.join("test.egg"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+ zfp.writestr("PKG-INFO", b"this is the package info")
+
+ assert pypi._is_valid_dist_file(f, "bdist_egg")
+
+ def test_wheel_no_wheel_file(self, tmpdir):
+ f = str(tmpdir.join("test.whl"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+
+ assert not pypi._is_valid_dist_file(f, "bdist_wheel")
+
+ def test_wheel_has_wheel_file(self, tmpdir):
+ f = str(tmpdir.join("test.whl"))
+
+ with zipfile.ZipFile(f, "w") as zfp:
+ zfp.writestr("something.txt", b"Just a placeholder file")
+ zfp.writestr("WHEEL", b"this is the package info")
+
+ assert pypi._is_valid_dist_file(f, "bdist_wheel")
+
+
class TestFileUpload:
@pytest.mark.parametrize("version", ["2", "3", "-1", "0", "dog", "cat"])
@@ -523,6 +641,8 @@ def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
filename = "{}-{}.tar.gz".format(project.name, release.version)
+ db_request.user = user
+ db_request.client_addr = "10.10.10.40"
db_request.POST = MultiDict({
"metadata_version": "1.2",
"name": project.name,
@@ -533,6 +653,7 @@ def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
db_request.POST.extend([
@@ -602,6 +723,68 @@ def storage_service_store(path, file_path):
db_request.db.query(Filename) \
.filter(Filename.filename == filename).one()
+ # Ensure that all of our journal entries have been created
+ journals = (
+ db_request.db.query(JournalEntry)
+ .order_by("submitted_date")
+ .all()
+ )
+ assert [
+ (j.name, j.version, j.action, j.submitted_by, j.submitted_from)
+ for j in journals
+ ] == [
+ (
+ release.project.name,
+ release.version,
+ "add source file {}".format(filename),
+ user,
+ "10.10.10.40",
+ ),
+ ]
+
+ @pytest.mark.parametrize("content_type", [None, "image/foobar"])
+ def test_upload_fails_invlaid_content_type(self, tmpdir, monkeypatch,
+ pyramid_config, db_request,
+ content_type):
+ monkeypatch.setattr(tempfile, "tempdir", str(tmpdir))
+
+ pyramid_config.testing_securitypolicy(userid=1)
+ user = UserFactory.create()
+ project = ProjectFactory.create()
+ release = ReleaseFactory.create(project=project, version="1.0")
+ RoleFactory.create(user=user, project=project)
+
+ db_request.db.add(
+ Classifier(classifier="Environment :: Other Environment"),
+ )
+
+ filename = "{}-{}.tar.gz".format(project.name, release.version)
+
+ db_request.POST = MultiDict({
+ "metadata_version": "1.2",
+ "name": project.name,
+ "version": release.version,
+ "filetype": "sdist",
+ "pyversion": "source",
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "content": pretend.stub(
+ filename=filename,
+ file=io.BytesIO(b"A fake file."),
+ type=content_type,
+ ),
+ })
+ db_request.POST.extend([
+ ("classifiers", "Environment :: Other Environment"),
+ ])
+
+ with pytest.raises(HTTPBadRequest) as excinfo:
+ pypi.file_upload(db_request)
+
+ resp = excinfo.value
+
+ assert resp.status_code == 400
+ assert resp.status == "400 Invalid distribution file."
+
@pytest.mark.parametrize("sig", [b"lol nope"])
def test_upload_fails_with_invalid_signature(self, pyramid_config,
db_request, sig):
@@ -623,6 +806,7 @@ def test_upload_fails_with_invalid_signature(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
"gpg_signature": pretend.stub(
filename=filename + ".asc",
@@ -658,6 +842,7 @@ def test_upload_fails_with_invalid_classifier(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
db_request.POST.extend([
@@ -694,6 +879,7 @@ def test_upload_fails_with_invalid_hash(self, pyramid_config, db_request):
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
@@ -708,6 +894,37 @@ def test_upload_fails_with_invalid_hash(self, pyramid_config, db_request):
"from the uploaded file."
)
+ def test_upload_fails_with_invalid_file(self, pyramid_config, db_request):
+ pyramid_config.testing_securitypolicy(userid=1)
+
+ user = UserFactory.create()
+ project = ProjectFactory.create()
+ release = ReleaseFactory.create(project=project, version="1.0")
+ RoleFactory.create(user=user, project=project)
+
+ filename = "{}-{}.zip".format(project.name, release.version)
+
+ db_request.POST = MultiDict({
+ "metadata_version": "1.2",
+ "name": project.name,
+ "version": release.version,
+ "filetype": "sdist",
+ "md5_digest": "0cc175b9c0f1b6a831c399e269772661",
+ "content": pretend.stub(
+ filename=filename,
+ file=io.BytesIO(b"a"),
+ type="application/zip",
+ ),
+ })
+
+ with pytest.raises(HTTPBadRequest) as excinfo:
+ pypi.file_upload(db_request)
+
+ resp = excinfo.value
+
+ assert resp.status_code == 400
+ assert resp.status == "400 Invalid distribution file."
+
def test_upload_fails_with_too_large_file(self, pyramid_config,
db_request):
pyramid_config.testing_securitypolicy(userid=1)
@@ -728,6 +945,7 @@ def test_upload_fails_with_too_large_file(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -759,6 +977,7 @@ def test_upload_fails_with_too_large_signature(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a"),
+ type="application/tar",
),
"gpg_signature": pretend.stub(
filename=filename + ".asc",
@@ -794,6 +1013,7 @@ def test_upload_fails_with_previously_used_filename(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -829,6 +1049,7 @@ def test_upload_fails_with_existing_file(self, pyramid_config, db_request):
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -862,6 +1083,7 @@ def test_upload_fails_with_wrong_filename(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -898,6 +1120,7 @@ def test_upload_fails_with_invalid_extension(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -933,6 +1156,7 @@ def test_upload_fails_with_unsafe_filename(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -962,6 +1186,7 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request):
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"a" * (pypi.MAX_FILESIZE + 1)),
+ type="application/tar",
),
})
@@ -989,6 +1214,8 @@ def test_upload_succeeds_with_wheel(self, tmpdir, monkeypatch,
plat,
)
+ db_request.user = user
+ db_request.client_addr = "10.10.10.30"
db_request.POST = MultiDict({
"metadata_version": "1.2",
"name": project.name,
@@ -999,6 +1226,7 @@ def test_upload_succeeds_with_wheel(self, tmpdir, monkeypatch,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
@@ -1012,6 +1240,8 @@ def storage_service_store(path, file_path):
lambda svc: storage_service
)
+ monkeypatch.setattr(pypi, "_is_valid_dist_file", lambda *a, **kw: True)
+
resp = pypi.file_upload(db_request)
assert resp.status_code == 200
@@ -1038,8 +1268,28 @@ def storage_service_store(path, file_path):
db_request.db.query(Filename) \
.filter(Filename.filename == filename).one()
+ # Ensure that all of our journal entries have been created
+ journals = (
+ db_request.db.query(JournalEntry)
+ .order_by("submitted_date")
+ .all()
+ )
+ assert [
+ (j.name, j.version, j.action, j.submitted_by, j.submitted_from)
+ for j in journals
+ ] == [
+ (
+ release.project.name,
+ release.version,
+ "add cp34 file {}".format(filename),
+ user,
+ "10.10.10.30",
+ ),
+ ]
+
@pytest.mark.parametrize("plat", ["linux_x86_64", "linux_x86_64.win32"])
- def test_upload_fails_with_unsupported_wheel_plat(self, pyramid_config,
+ def test_upload_fails_with_unsupported_wheel_plat(self, monkeypatch,
+ pyramid_config,
db_request, plat):
pyramid_config.testing_securitypolicy(userid=1)
@@ -1064,9 +1314,12 @@ def test_upload_fails_with_unsupported_wheel_plat(self, pyramid_config,
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
+ monkeypatch.setattr(pypi, "_is_valid_dist_file", lambda *a, **kw: True)
+
with pytest.raises(HTTPBadRequest) as excinfo:
pypi.file_upload(db_request)
@@ -1091,6 +1344,8 @@ def test_upload_succeeds_creates_release(self, pyramid_config, db_request):
filename = "{}-{}.tar.gz".format(project.name, "1.0")
+ db_request.user = user
+ db_request.client_addr = "10.10.10.20"
db_request.POST = MultiDict({
"metadata_version": "1.2",
"name": project.name,
@@ -1101,6 +1356,7 @@ def test_upload_succeeds_creates_release(self, pyramid_config, db_request):
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
db_request.POST.extend([
@@ -1147,6 +1403,32 @@ def test_upload_succeeds_creates_release(self, pyramid_config, db_request):
db_request.db.query(Filename) \
.filter(Filename.filename == filename).one()
+ # Ensure that all of our journal entries have been created
+ journals = (
+ db_request.db.query(JournalEntry)
+ .order_by("submitted_date")
+ .all()
+ )
+ assert [
+ (j.name, j.version, j.action, j.submitted_by, j.submitted_from)
+ for j in journals
+ ] == [
+ (
+ release.project.name,
+ release.version,
+ "new release",
+ user,
+ "10.10.10.20",
+ ),
+ (
+ release.project.name,
+ release.version,
+ "add source file {}".format(filename),
+ user,
+ "10.10.10.20",
+ ),
+ ]
+
def test_upload_succeeds_creates_project(self, pyramid_config, db_request):
pyramid_config.testing_securitypolicy(userid=1)
@@ -1164,11 +1446,13 @@ def test_upload_succeeds_creates_project(self, pyramid_config, db_request):
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
+ type="application/tar",
),
})
storage_service = pretend.stub(store=lambda path, content: None)
db_request.find_service = lambda svc: storage_service
+ db_request.client_addr = "10.10.10.10"
resp = pypi.file_upload(db_request)
@@ -1206,6 +1490,34 @@ def test_upload_succeeds_creates_project(self, pyramid_config, db_request):
db_request.db.query(Filename) \
.filter(Filename.filename == filename).one()
+ # Ensure that all of our journal entries have been created
+ journals = (
+ db_request.db.query(JournalEntry)
+ .order_by("submitted_date")
+ .all()
+ )
+ assert [
+ (j.name, j.version, j.action, j.submitted_by, j.submitted_from)
+ for j in journals
+ ] == [
+ ("example", None, "create", user, "10.10.10.10"),
+ (
+ "example",
+ None,
+ "add Owner {}".format(user.username),
+ user,
+ "10.10.10.10",
+ ),
+ ("example", "1.0", "new release", user, "10.10.10.10"),
+ (
+ "example",
+ "1.0",
+ "add source file example-1.0.tar.gz",
+ user,
+ "10.10.10.10",
+ ),
+ ]
+
def test_fails_without_user(self, pyramid_config, pyramid_request):
pyramid_config.testing_securitypolicy(userid=None)
| Uploading doesn't add a journal entry
Currently the upload API is not actually adding a journal entry; this needs to happen.
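A minimal sketch of the kind of entry the upload view could record, assuming the existing `JournalEntry` model and the `request`, `release`, and `file_` objects already available in the view:

```python
from warehouse.packaging.models import JournalEntry

# Record who uploaded which file, mirroring the other journal actions.
request.db.add(
    JournalEntry(
        name=release.project.name,
        version=release.version,
        action="add {} file {}".format(file_.python_version, file_.filename),
        submitted_by=request.user,
        submitted_from=request.client_addr,
    ),
)
```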
| 2015-12-16T14:24:27Z | [] | [] |
|
pypi/warehouse | 873 | pypi__warehouse-873 | [
"777"
] | 079874e9d90233276f82077da1862f157be25c24 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -21,11 +21,11 @@
from pyramid import renderers
from pyramid.config import Configurator as _Configurator
from pyramid.response import Response
-from pyramid.static import ManifestCacheBuster
from pyramid.tweens import EXCVIEW
from pyramid_rpc.xmlrpc import XMLRPCRenderer
from warehouse import __commit__
+from warehouse.utils.static import ManifestCacheBuster
from warehouse.utils.wsgi import ProxyFixer, VhmRootRemover
@@ -357,7 +357,11 @@ def configure(settings=None):
)
# Enable Warehouse to serve our static files
- config.add_static_view(name="static", path="warehouse:static/dist/")
+ config.add_static_view(
+ "static",
+ "warehouse:static/dist/",
+ cache_max_age=10 * 365 * 24 * 60 * 60, # 10 years
+ )
config.add_cache_buster(
"warehouse:static/dist/",
ManifestCacheBuster(
@@ -367,7 +371,7 @@ def configure(settings=None):
)
# Enable Warehouse to serve our locale files
- config.add_static_view(name="locales", path="warehouse:locales/")
+ config.add_static_view("locales", "warehouse:locales/")
# Enable support of passing certain values like remote host, client
# address, and protocol support in from an outer proxy to the application.
diff --git a/warehouse/utils/static.py b/warehouse/utils/static.py
new file mode 100644
--- /dev/null
+++ b/warehouse/utils/static.py
@@ -0,0 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from pyramid.static import ManifestCacheBuster as _ManifestCacheBuster
+
+
+class ManifestCacheBuster(_ManifestCacheBuster):
+
+ def __call__(self, request, subpath, kw):
+ try:
+ return self.manifest[subpath], kw
+ except KeyError:
+ # We raise an error here even though the one from Pyramid does not.
+ # This is done because we want to be strict that all static files
+ # must be cache busted otherwise it is likely an error of some kind
+ # and should be remedied and without a loud error it's unlikely to
+ # be noticed.
+ raise ValueError(
+ "{} is not able to be cache busted.".format(subpath)
+ ) from None
| diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -275,7 +275,7 @@ def __init__(self):
lambda d: configurator_settings.update(d)
),
add_tween=pretend.call_recorder(lambda tween_factory, **kw: None),
- add_static_view=pretend.call_recorder(lambda name, path, **kw: None),
+ add_static_view=pretend.call_recorder(lambda *a, **kw: None),
add_cache_buster=pretend.call_recorder(lambda spec, buster: None),
scan=pretend.call_recorder(lambda ignore: None),
)
@@ -441,8 +441,12 @@ def __init__(self):
),
]
assert configurator_obj.add_static_view.calls == [
- pretend.call(name="static", path="warehouse:static/dist/"),
- pretend.call(name="locales", path="warehouse:locales/"),
+ pretend.call(
+ "static",
+ "warehouse:static/dist/",
+ cache_max_age=315360000,
+ ),
+ pretend.call("locales", "warehouse:locales/"),
]
assert configurator_obj.add_cache_buster.calls == [
pretend.call("warehouse:static/dist/", cachebuster_obj),
diff --git a/tests/unit/utils/test_static.py b/tests/unit/utils/test_static.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/utils/test_static.py
@@ -0,0 +1,32 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from warehouse.utils.static import ManifestCacheBuster
+
+
+class TestManifestCacheBuster:
+
+ def test_returns_when_valid(self):
+ cb = ManifestCacheBuster("warehouse:static/dist/manifest.json")
+ cb._manifest = {"/the/path/style.css": "/the/busted/path/style.css"}
+ result = cb(None, "/the/path/style.css", {"keyword": "arg"})
+
+ assert result == ("/the/busted/path/style.css", {"keyword": "arg"})
+
+ def test_raises_when_invalid(self):
+ cb = ManifestCacheBuster("warehouse:static/dist/manifest.json")
+ cb._manifest = {}
+
+ with pytest.raises(ValueError):
+ cb(None, "/the/path/style.css", {"keyword": "arg"})
| Handle stale references to static files
This may be solved by #776, but if static files are changed, the old files are no longer available. For example, `/` may be cached with a reference to `main.hash1.css`, which no longer exists because it was replaced with `main.hash2.css`. We'll want some solution to this, which might be #776 or might be some sort of purging mechanism.
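The fix that landed serves hashed files with a very long max-age and makes cache busting strict, raising when a path cannot be resolved through the manifest. Roughly how the strict buster behaves, mirroring the new test (setting `_manifest` directly is only for illustration):

``` python
from warehouse.utils.static import ManifestCacheBuster

cb = ManifestCacheBuster("warehouse:static/dist/manifest.json")
cb._manifest = {"/the/path/style.css": "/the/busted/path/style.css"}

# Known paths are rewritten to their hashed equivalents...
cb(None, "/the/path/style.css", {})   # ("/the/busted/path/style.css", {})

# ...and unknown paths fail loudly instead of being served un-busted.
cb(None, "/missing.css", {})          # raises ValueError
```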
| 2015-12-30T03:53:53Z | [] | [] |
|
pypi/warehouse | 878 | pypi__warehouse-878 | [
"783"
] | db556e02eaea911e3001b0132e891aa0557ee276 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -239,6 +239,7 @@ def configure(settings=None):
filters.setdefault("json", "warehouse.filters:tojson")
filters.setdefault("readme", "warehouse.filters:readme_renderer")
filters.setdefault("shorten_number", "warehouse.filters:shorten_number")
+ filters.setdefault("urlparse", "warehouse.filters:urlparse")
# We also want to register some global functions for Jinja
jglobals = config.get_settings().setdefault("jinja2.globals", {})
diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -90,3 +90,7 @@ def shorten_number(value):
def tojson(value):
return json.dumps(value, sort_keys=True, separators=(",", ":"))
+
+
+def urlparse(value):
+ return urllib.parse.urlparse(value)
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -12,6 +12,8 @@
import enum
+from collections import OrderedDict
+
from citext import CIText
from pyramid.security import Allow
from pyramid.threadlocal import get_current_request
@@ -316,6 +318,22 @@ def __table_args__(cls): # noqa
viewonly=True,
)
+ @property
+ def urls(self):
+ _urls = OrderedDict()
+
+ if self.home_page:
+ _urls["Homepage"] = self.home_page
+
+ for urlspec in self.project_urls:
+ name, url = urlspec.split(",", 1)
+ _urls[name] = url
+
+ if self.download_url and "Download" not in _urls:
+ _urls["Download"] = self.download_url
+
+ return _urls
+
@property
def has_meta(self):
return any([self.keywords])
| diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py
--- a/tests/unit/packaging/test_models.py
+++ b/tests/unit/packaging/test_models.py
@@ -10,12 +10,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from collections import OrderedDict
+
import pretend
import pytest
from pyramid.security import Allow
-from warehouse.packaging.models import ProjectFactory, File
+from warehouse.packaging.models import (
+ ProjectFactory, Dependency, DependencyKind, File,
+)
from ...common.db.packaging import (
ProjectFactory as DBProjectFactory, ReleaseFactory as DBReleaseFactory,
@@ -107,6 +111,111 @@ def test_has_meta_false(self, db_session):
release = DBReleaseFactory.create()
assert not release.has_meta
+ @pytest.mark.parametrize(
+ ("home_page", "download_url", "project_urls", "expected"),
+ [
+ (None, None, [], OrderedDict()),
+ (
+ "https://example.com/home/",
+ None,
+ [],
+ OrderedDict([("Homepage", "https://example.com/home/")]),
+ ),
+ (
+ None,
+ "https://example.com/download/",
+ [],
+ OrderedDict([("Download", "https://example.com/download/")]),
+ ),
+ (
+ "https://example.com/home/",
+ "https://example.com/download/",
+ [],
+ OrderedDict([
+ ("Homepage", "https://example.com/home/"),
+ ("Download", "https://example.com/download/"),
+ ]),
+ ),
+ (
+ None,
+ None,
+ ["Source Code,https://example.com/source-code/"],
+ OrderedDict([
+ ("Source Code", "https://example.com/source-code/"),
+ ]),
+ ),
+ (
+ "https://example.com/home/",
+ "https://example.com/download/",
+ ["Source Code,https://example.com/source-code/"],
+ OrderedDict([
+ ("Homepage", "https://example.com/home/"),
+ ("Source Code", "https://example.com/source-code/"),
+ ("Download", "https://example.com/download/"),
+ ]),
+ ),
+ (
+ "https://example.com/home/",
+ "https://example.com/download/",
+ [
+ "Homepage,https://example.com/home2/",
+ "Source Code,https://example.com/source-code/",
+ ],
+ OrderedDict([
+ ("Homepage", "https://example.com/home2/"),
+ ("Source Code", "https://example.com/source-code/"),
+ ("Download", "https://example.com/download/"),
+ ]),
+ ),
+ (
+ "https://example.com/home/",
+ "https://example.com/download/",
+ [
+ "Source Code,https://example.com/source-code/",
+ "Download,https://example.com/download2/",
+ ],
+ OrderedDict([
+ ("Homepage", "https://example.com/home/"),
+ ("Source Code", "https://example.com/source-code/"),
+ ("Download", "https://example.com/download2/"),
+ ]),
+ ),
+ (
+ "https://example.com/home/",
+ "https://example.com/download/",
+ [
+ "Homepage,https://example.com/home2/",
+ "Source Code,https://example.com/source-code/",
+ "Download,https://example.com/download2/",
+ ],
+ OrderedDict([
+ ("Homepage", "https://example.com/home2/"),
+ ("Source Code", "https://example.com/source-code/"),
+ ("Download", "https://example.com/download2/"),
+ ]),
+ ),
+ ],
+ )
+ def test_urls(self, db_session, home_page, download_url, project_urls,
+ expected):
+ release = DBReleaseFactory.create(
+ home_page=home_page,
+ download_url=download_url,
+ )
+
+ for urlspec in project_urls:
+ db_session.add(
+ Dependency(
+ name=release.project.name,
+ version=release.version,
+ kind=DependencyKind.project_url.value,
+ specifier=urlspec,
+ )
+ )
+
+ # TODO: It'd be nice to test for the actual ordering here.
+ assert dict(release.urls) == dict(expected)
+
class TestFile:
diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -10,6 +10,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import urllib.parse
+
import jinja2
import pretend
import pytest
@@ -153,3 +155,9 @@ def test_shorten_number(inp, expected):
)
def test_tojson(inp, expected):
assert filters.tojson(inp) == expected
+
+
+def test_urlparse():
+ inp = "https://google.com/foo/bar?a=b"
+ expected = urllib.parse.urlparse(inp)
+ assert filters.urlparse(inp) == expected
| Display project-urls at the top of the project detail page
We'll want to display the project URLs at the top of the project detail page (where homepage/source code/etc. are currently). Ideally this will try to select an appropriate icon based on the site.
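The diff adds a `urlparse` Jinja2 filter, presumably so templates can look at a URL's host when choosing an icon. A hypothetical sketch of that selection (the mapping and helper name are illustrative assumptions, not part of the patch):

``` python
from urllib.parse import urlparse

# Hypothetical host-to-icon mapping; not part of the actual patch.
_URL_ICONS = {
    "github.com": "fa-github",
    "bitbucket.org": "fa-bitbucket",
    "readthedocs.org": "fa-book",
}


def icon_for_url(url, default="fa-external-link"):
    host = urlparse(url).netloc.lower()
    return _URL_ICONS.get(host, default)
```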
| 2015-12-30T16:25:29Z | [] | [] |
|
pypi/warehouse | 889 | pypi__warehouse-889 | [
"820"
] | d9eb02a4232ff1d9511fbbb688b7bf7c0458c658 | diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -124,6 +124,10 @@ def json_release(release, request):
"has_sig": f.has_signature,
"comment_text": f.comment_text,
"md5_digest": f.md5_digest,
+ "digests": {
+ "md5": f.md5_digest,
+ "sha256": f.sha256_digest,
+ },
"size": f.size,
"downloads": f.downloads,
"upload_time": f.upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
diff --git a/warehouse/legacy/api/xmlrpc.py b/warehouse/legacy/api/xmlrpc.py
--- a/warehouse/legacy/api/xmlrpc.py
+++ b/warehouse/legacy/api/xmlrpc.py
@@ -238,6 +238,10 @@ def release_urls(request, package_name, version):
"python_version": f.python_version,
"size": f.size,
"md5_digest": f.md5_digest,
+ "digests": {
+ "md5": f.md5_digest,
+ "sha256": f.sha256_digest,
+ },
"has_sig": f.has_signature,
"upload_time": f.upload_time,
"comment_text": f.comment_text,
| diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py
--- a/tests/unit/legacy/api/test_json.py
+++ b/tests/unit/legacy/api/test_json.py
@@ -185,6 +185,10 @@ def test_detail_renders(self, pyramid_config, db_request):
"filename": files[0].filename,
"has_sig": True,
"md5_digest": files[0].md5_digest,
+ "digests": {
+ "md5": files[0].md5_digest,
+ "sha256": files[0].sha256_digest,
+ },
"packagetype": None,
"python_version": "source",
"size": 200,
@@ -201,6 +205,10 @@ def test_detail_renders(self, pyramid_config, db_request):
"filename": files[1].filename,
"has_sig": True,
"md5_digest": files[1].md5_digest,
+ "digests": {
+ "md5": files[1].md5_digest,
+ "sha256": files[1].sha256_digest,
+ },
"packagetype": None,
"python_version": "source",
"size": 200,
@@ -219,6 +227,10 @@ def test_detail_renders(self, pyramid_config, db_request):
"filename": files[1].filename,
"has_sig": True,
"md5_digest": files[1].md5_digest,
+ "digests": {
+ "md5": files[1].md5_digest,
+ "sha256": files[1].sha256_digest,
+ },
"packagetype": None,
"python_version": "source",
"size": 200,
diff --git a/tests/unit/legacy/api/test_xmlrpc.py b/tests/unit/legacy/api/test_xmlrpc.py
--- a/tests/unit/legacy/api/test_xmlrpc.py
+++ b/tests/unit/legacy/api/test_xmlrpc.py
@@ -373,6 +373,10 @@ def test_release_urls(db_request):
"python_version": file_.python_version,
"size": file_.size,
"md5_digest": file_.md5_digest,
+ "digests": {
+ "md5": file_.md5_digest,
+ "sha256": file_.sha256_digest,
+ },
"has_sig": file_.has_signature,
"upload_time": file_.upload_time,
"comment_text": file_.comment_text,
| Replace md5 with sha256 on UI
Since PyPI is being rewritten, it's a good opportunity to replace md5 hashes with sha256.
NOTE: there is also #681, which covers the upload functionality related to md5.
| It's planned to do that, yes. It requires a bit of work to migrate everything because we have to loop over _all_ of the current packages and recompute their hashes.
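A rough sketch of that backfill, purely illustrative (`session` and `storage` stand in for whatever the real migration script would use):

``` python
import hashlib


def backfill_sha256(session, storage):
    # Illustrative only: recompute SHA-256 for files that predate the column.
    # `storage.open()` is an assumption about how file contents are read.
    from warehouse.packaging.models import File  # import path assumed

    for file_ in session.query(File).filter(File.sha256_digest.is_(None)):
        digest = hashlib.sha256()
        with storage.open(file_.path) as fp:
            for chunk in iter(lambda: fp.read(8192), b""):
                digest.update(chunk)
        file_.sha256_digest = digest.hexdigest()

    session.commit()
```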
| 2016-01-04T17:43:14Z | [] | [] |
pypi/warehouse | 891 | pypi__warehouse-891 | [
"681"
] | 924d651603e60ca15cb8d68bede1037cbf4e7b32 | diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py
--- a/warehouse/legacy/api/pypi.py
+++ b/warehouse/legacy/api/pypi.py
@@ -342,9 +342,19 @@ class MetadataForm(forms.Form):
comment = wtforms.StringField(validators=[wtforms.validators.Optional()])
md5_digest = wtforms.StringField(
validators=[
- wtforms.validators.DataRequired(),
+ wtforms.validators.Optional(),
],
)
+ sha256_digest = wtforms.StringField(
+ validators=[
+ wtforms.validators.Optional(),
+ wtforms.validators.Regexp(
+ r"^[A-F0-9]{64}$",
+ re.IGNORECASE,
+ message="Must be a valid, hex encoded, SHA256 message digest.",
+ ),
+ ]
+ )
# Legacy dependency information
requires = ListField(
@@ -417,6 +427,12 @@ def full_validate(self):
"The only valid Python version for a sdist is 'source'."
)
+ # We *must* have at least one digest to verify against.
+ if not self.md5_digest.data and not self.sha256_digest.data:
+ raise wtforms.validators.ValidationError(
+ "Must include at least one message digest.",
+ )
+
_safe_zipnames = re.compile(r"(purelib|platlib|headers|scripts|data).+", re.I)
@@ -741,15 +757,21 @@ def file_upload(request):
for hasher in file_hashes.values():
hasher.update(chunk)
- # Actually verify that the md5 hash of the file matches the expected
- # md5 hash. We probably don't actually need to use hmac.compare_digest
- # here since both the md5_digest and the file whose file_hash we've
- # computed comes from the remote user, however better safe than sorry.
- if not hmac.compare_digest(
- form.md5_digest.data, file_hashes["md5"].hexdigest()):
+ # Actually verify the digests that we've gotten. We're going to use
+ # hmac.compare_digest even though we probably don't actually need to
+ # because it's better safe than sorry. In the case of multiple digests
+ # we expect them all to be given.
+ if not all([
+ hmac.compare_digest(
+ getattr(form, "{}_digest".format(digest_name)).data.lower(),
+ digest_value.hexdigest().lower(),
+ )
+ for digest_name, digest_value in file_hashes.items()
+ if getattr(form, "{}_digest".format(digest_name)).data
+ ]):
raise _exc_with_message(
HTTPBadRequest,
- "The MD5 digest supplied does not match a digest calculated "
+ "The digest supplied does not match a digest calculated "
"from the uploaded file."
)
| diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py
--- a/tests/unit/legacy/api/test_pypi.py
+++ b/tests/unit/legacy/api/test_pypi.py
@@ -297,8 +297,25 @@ class TestMetadataForm:
@pytest.mark.parametrize(
"data",
[
- {"filetype": "sdist"},
- {"filetpye": "bdist_wheel", "pyversion": "3.4"},
+ {"filetype": "sdist", "md5_digest": "bad"},
+ {
+ "filetpye": "bdist_wheel",
+ "pyversion": "3.4",
+ "md5_digest": "bad",
+ },
+ {"filetype": "sdist", "sha256_digest": "bad"},
+ {
+ "filetpye": "bdist_wheel",
+ "pyversion": "3.4",
+ "sha256_digest": "bad",
+ },
+ {"filetype": "sdist", "md5_digest": "bad", "sha256_digest": "bad"},
+ {
+ "filetpye": "bdist_wheel",
+ "pyversion": "3.4",
+ "md5_digest": "bad",
+ "sha256_digest": "bad",
+ },
],
)
def test_full_validate_valid(self, data):
@@ -489,6 +506,7 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request,
"metadata_version": "1.2",
"name": "example",
"version": "1.0",
+ "md5_digest": "bad",
},
"filetype: This field is required.",
),
@@ -509,6 +527,7 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request,
"version": "1.0",
"filetype": "bdist_wat",
"pyversion": "1.0",
+ "md5_digest": "bad",
},
"filetype: Unknown type of file.",
),
@@ -524,7 +543,7 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request,
"'source'.",
),
- # md5_digest errors.
+ # digest errors.
(
{
"metadata_version": "1.2",
@@ -532,7 +551,7 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request,
"version": "1.0",
"filetype": "sdist",
},
- "md5_digest: This field is required.",
+ "__all__: Must include at least one message digest.",
),
# summary errors
@@ -625,9 +644,53 @@ def test_upload_cleans_unknown_values(self, pyramid_config, db_request):
assert "name" not in db_request.POST
- @pytest.mark.parametrize("has_signature", [True, False])
+ @pytest.mark.parametrize(
+ ("has_signature", "digests"),
+ [
+ (True, {"md5_digest": "335c476dc930b959dda9ec82bd65ef19"}),
+ (
+ True,
+ {
+ "sha256_digest": (
+ "4a8422abcc484a4086bdaa618c65289f749433b07eb433c51c4e3"
+ "77143ff5fdb"
+ ),
+ },
+ ),
+ (False, {"md5_digest": "335c476dc930b959dda9ec82bd65ef19"}),
+ (
+ False,
+ {
+ "sha256_digest": (
+ "4a8422abcc484a4086bdaa618c65289f749433b07eb433c51c4e3"
+ "77143ff5fdb"
+ ),
+ },
+ ),
+ (
+ True,
+ {
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "sha256_digest": (
+ "4a8422abcc484a4086bdaa618c65289f749433b07eb433c51c4e3"
+ "77143ff5fdb"
+ ),
+ },
+ ),
+ (
+ False,
+ {
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "sha256_digest": (
+ "4a8422abcc484a4086bdaa618c65289f749433b07eb433c51c4e3"
+ "77143ff5fdb"
+ ),
+ },
+ ),
+ ],
+ )
def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
- db_request, has_signature):
+ db_request, has_signature, digests):
monkeypatch.setattr(tempfile, "tempdir", str(tmpdir))
pyramid_config.testing_securitypolicy(userid=1)
@@ -650,7 +713,6 @@ def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
"version": release.version,
"filetype": "sdist",
"pyversion": "source",
- "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
@@ -660,6 +722,7 @@ def test_successful_upload(self, tmpdir, monkeypatch, pyramid_config,
db_request.POST.extend([
("classifiers", "Environment :: Other Environment"),
])
+ db_request.POST.update(digests)
if has_signature:
db_request.POST["gpg_signature"] = pretend.stub(
@@ -861,7 +924,41 @@ def test_upload_fails_with_invalid_classifier(self, pyramid_config,
"valid choice for this field"
)
- def test_upload_fails_with_invalid_hash(self, pyramid_config, db_request):
+ @pytest.mark.parametrize(
+ "digests",
+ [
+ {"md5_digest": "bad"},
+ {
+ "sha256_digest": (
+ "badbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbad"
+ "badbadb"
+ ),
+ },
+ {
+ "md5_digest": "bad",
+ "sha256_digest": (
+ "badbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbad"
+ "badbadb"
+ ),
+ },
+ {
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "sha256_digest": (
+ "badbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbadbad"
+ "badbadb"
+ ),
+ },
+ {
+ "md5_digest": "bad",
+ "sha256_digest": (
+ "4a8422abcc484a4086bdaa618c65289f749433b07eb433c51c4e37714"
+ "3ff5fdb"
+ ),
+ },
+ ],
+ )
+ def test_upload_fails_with_invalid_digest(self, pyramid_config, db_request,
+ digests):
pyramid_config.testing_securitypolicy(userid=1)
user = UserFactory.create()
@@ -876,13 +973,13 @@ def test_upload_fails_with_invalid_hash(self, pyramid_config, db_request):
"name": project.name,
"version": release.version,
"filetype": "sdist",
- "md5_digest": "nope!",
"content": pretend.stub(
filename=filename,
file=io.BytesIO(b"A fake file."),
type="application/tar",
),
})
+ db_request.POST.update(digests)
with pytest.raises(HTTPBadRequest) as excinfo:
pypi.file_upload(db_request)
@@ -891,7 +988,7 @@ def test_upload_fails_with_invalid_hash(self, pyramid_config, db_request):
assert resp.status_code == 400
assert resp.status == (
- "400 The MD5 digest supplied does not match a digest calculated "
+ "400 The digest supplied does not match a digest calculated "
"from the uploaded file."
)
| Support package upload with a SHA-256 checksum
Currently package upload respects `md5_digest` as a form parameter and [validates the digest](https://github.com/pypa/warehouse/blob/master/warehouse/legacy/api/pypi.py#L646).
Discussion took place earlier this year (or last year) about migrating to SHA-256 as the digest algorithm instead. We should begin to validate that and support it so tools (e.g., twine) can start uploading packages using this digest algorithm.
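On the client side, a tool such as twine would then compute the digests and send them as the `md5_digest`/`sha256_digest` form fields the patch above accepts (at least one is required); a minimal sketch:

``` python
import hashlib


def upload_digests(path):
    # Compute both digests in one pass over the distribution file.
    md5, sha256 = hashlib.md5(), hashlib.sha256()
    with open(path, "rb") as fp:
        for chunk in iter(lambda: fp.read(8192), b""):
            md5.update(chunk)
            sha256.update(chunk)
    # Both fields are optional in the new form, but at least one must be sent.
    return {"md5_digest": md5.hexdigest(), "sha256_digest": sha256.hexdigest()}
```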
| 2016-01-04T21:13:18Z | [] | [] |
|
pypi/warehouse | 960 | pypi__warehouse-960 | [
"803"
] | b94787b5c76370d204a29795a0dc4f3b4743d4ce | diff --git a/warehouse/utils/paginate.py b/warehouse/utils/paginate.py
--- a/warehouse/utils/paginate.py
+++ b/warehouse/utils/paginate.py
@@ -18,12 +18,18 @@ class _ElasticsearchWrapper:
def __init__(self, query):
self.query = query
self.results = None
+ self.best_guess = None
def __getitem__(self, range):
if self.results is not None:
raise RuntimeError("Cannot reslice after having already sliced.")
self.results = self.query[range].execute()
+ if hasattr(self.results, "suggest"):
+ suggestion = self.results.suggest.name_suggestion[0]
+ if suggestion.options:
+ self.best_guess = suggestion.options[0]
+
return list(self.results)
def __len__(self):
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -163,6 +163,10 @@ def search(request):
"maintainer_email", "home_page", "license", "summary",
"description", "keywords", "platform", "download_url",
],
+ ).suggest(
+ name="name_suggestion",
+ text=request.params["q"],
+ term={"field": "name"}
)
else:
query = request.es.query()
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -100,7 +100,10 @@ def test_with_a_query(self, monkeypatch, page):
params = {"q": "foo bar"}
if page is not None:
params["page"] = page
- query = pretend.stub()
+ suggest = pretend.stub()
+ query = pretend.stub(
+ suggest=pretend.call_recorder(lambda *a, **kw: suggest),
+ )
request = pretend.stub(
es=pretend.stub(
query=pretend.call_recorder(lambda *a, **kw: query),
@@ -118,7 +121,7 @@ def test_with_a_query(self, monkeypatch, page):
assert search(request) == {"page": page_obj, "term": params.get("q")}
assert page_cls.calls == [
- pretend.call(query, url_maker=url_maker, page=page or 1),
+ pretend.call(suggest, url_maker=url_maker, page=page or 1),
]
assert url_maker_factory.calls == [pretend.call(request)]
assert request.es.query.calls == [
@@ -132,6 +135,13 @@ def test_with_a_query(self, monkeypatch, page):
],
),
]
+ assert query.suggest.calls == [
+ pretend.call(
+ name="name_suggestion",
+ term={"field": "name"},
+ text="foo bar",
+ ),
+ ]
@pytest.mark.parametrize("page", [None, 1, 5])
def test_without_a_query(self, monkeypatch, page):
diff --git a/tests/unit/utils/test_paginate.py b/tests/unit/utils/test_paginate.py
--- a/tests/unit/utils/test_paginate.py
+++ b/tests/unit/utils/test_paginate.py
@@ -18,6 +18,18 @@
from warehouse.utils import paginate
+class FakeSuggestion:
+
+ def __init__(self, options):
+ self.options = options
+
+
+class FakeSuggest:
+
+ def __init__(self, name_suggestion):
+ self.name_suggestion = name_suggestion
+
+
class FakeResult:
def __init__(self, data, total):
@@ -33,6 +45,18 @@ def __iter__(self):
yield i
+class FakeSuggestResult(FakeResult):
+
+ def __init__(self, data, total, options):
+ super().__init__(data, total)
+ self.options = options
+
+ @property
+ def suggest(self):
+ suggestion = FakeSuggestion(options=self.options)
+ return FakeSuggest(name_suggestion=[suggestion])
+
+
class FakeQuery:
def __init__(self, fake):
@@ -51,6 +75,18 @@ def execute(self):
return FakeResult(self.fake[self.range], len(self.fake))
+class FakeSuggestQuery(FakeQuery):
+
+ def __init__(self, fake, options):
+ super().__init__(fake)
+ self.options = options
+
+ def execute(self):
+ data = self.fake[self.range]
+ total = len(self.fake)
+ return FakeSuggestResult(data, total, self.options)
+
+
class TestElasticsearchWrapper:
def test_slices_and_length(self):
@@ -71,6 +107,21 @@ def test_len_before_slice_fails(self):
with pytest.raises(RuntimeError):
len(wrapper)
+ def test_best_guess_suggestion(self):
+ fake_option = pretend.stub()
+ query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[fake_option])
+ wrapper = paginate._ElasticsearchWrapper(query)
+ wrapper[1:3]
+
+ assert wrapper.best_guess == fake_option
+
+ def test_best_guess_suggestion_no_options(self):
+ query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[])
+ wrapper = paginate._ElasticsearchWrapper(query)
+ wrapper[1:3]
+
+ assert wrapper.best_guess is None
+
def test_elasticsearch_page_has_wrapper(monkeypatch):
page_obj = pretend.stub()
| Default search message
We need to add a message for when a search doesn't match any packages.
| This message should use a `<div class="callout-block"></div>`
Hi @nlhkabu, is this close to what you had in mind?
Original:
<img width="852" alt="screen shot 2016-02-04 at 9 59 00 am" src="https://cloud.githubusercontent.com/assets/294415/12818720/f9a245f0-cb25-11e5-9354-8f9290b7cb62.png">
New:
<img width="849" alt="screen shot 2016-02-04 at 9 57 08 am" src="https://cloud.githubusercontent.com/assets/294415/12818669/ce177ce8-cb25-11e5-9c41-9e76129cb489.png">
Should the match count element stay, go away or be replaced by this message?
That's perfect, thanks @di!
I don't think the duplication is a problem - it just reinforces the message.
What would be **awesome** is if we could display some other search terms that are close to the input...
For example, if I search "Pyranid", I should see:
> There were no packages matching _Pyranid_. Did you mean **Pyramid**?
ping @dstufft and @HonzaKral - is this possible with elastic?
Looks like it can, [Phrase Suggester](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-suggesters-phrase.html).
Yes, as @dstufft mentioned - Phrase or Term suggesters are what you are looking for. The difference is that the `term` suggester only looks at individual words in isolation, whereas `phrase` considers their use together. The simple demonstration is that the term suggester cannot see anything wrong with `"johnny walker"` (since both words are correct and exist in the index), whereas the phrase suggester will suggest `"johnnie walker"`.
I would also suggest using the completion suggester for autocomplete; it might help a lot. Let me know if you'd like to discuss the options in more detail - I can hop on IRC. Thanks
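For reference, wiring a term suggester onto the existing elasticsearch-dsl query (essentially what the patch above ends up doing) and reading back the best guess looks roughly like this:

``` python
query = request.es.query(
    "multi_match", query="pyranid", fields=["name", "summary", "description"],
).suggest(
    name="name_suggestion", text="pyranid", term={"field": "name"},
)

results = query.execute()
suggestion = results.suggest.name_suggestion[0]
best_guess = suggestion.options[0].text if suggestion.options else None
```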
Fantastic! @dstufft I'm guessing this is not on the list for "become PyPI" - so maybe we should open another issue?
Hi @nlhkabu, how does this look?
<img width="834" alt="screen shot 2016-02-04 at 5 22 51 pm" src="https://cloud.githubusercontent.com/assets/294415/12831749/f54e703a-cb63-11e5-9f95-de3a06f5f7e9.png">
The link leads to `/search/?q=pybankid`
(For whatever reason the `pyramid` package is not actually in the development DB, so `pybankid` is elasticsearch's best guess)
| 2016-02-06T16:22:05Z | [] | [] |
pypi/warehouse | 975 | pypi__warehouse-975 | [
"855"
] | fc8bd4752ec571e9ea55cfad981c3f21c597330d | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -236,6 +236,7 @@ def configure(settings=None):
# We'll want to configure some filters for Jinja2 as well.
filters = config.get_settings().setdefault("jinja2.filters", {})
+ filters.setdefault("format_tags", "warehouse.filters:format_tags")
filters.setdefault("json", "warehouse.filters:tojson")
filters.setdefault("readme", "warehouse.filters:readme")
filters.setdefault("shorten_number", "warehouse.filters:shorten_number")
diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -13,6 +13,7 @@
import binascii
import hmac
import json
+import re
import urllib.parse
import html5lib
@@ -95,3 +96,21 @@ def tojson(value):
def urlparse(value):
return urllib.parse.urlparse(value)
+
+
+def format_tags(tags):
+ # split tags
+ if re.search(r',', tags):
+ split_tags = re.split(r'\s*,\s*', tags)
+ elif re.search(r';', tags):
+ split_tags = re.split(r'\s*;\s*', tags)
+ else:
+ split_tags = re.split(r'\s+', tags)
+
+ # strip whitespace, quotes, double quotes
+ stripped_tags = [re.sub(r'^["\'\s]+|["\'\s]+$', '', t) for t in split_tags]
+
+ # remove any empty tags
+ formatted_tags = [t for t in stripped_tags if t]
+
+ return formatted_tags
| diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -165,3 +165,28 @@ def test_urlparse():
inp = "https://google.com/foo/bar?a=b"
expected = urllib.parse.urlparse(inp)
assert filters.urlparse(inp) == expected
+
+
+@pytest.mark.parametrize(
+ ("inp", "expected"),
+ [
+ (
+ "'python', finance, \"data\", code , test automation",
+ ["python", "finance", "data", "code", "test automation"]
+ ),
+ (
+ "'python'; finance; \"data\"; code ; test automation",
+ ["python", "finance", "data", "code", "test automation"]
+ ),
+ (
+ "a \"b\" c d 'e'",
+ ["a", "b", "c", "d", "e"]
+ ),
+ (
+ " ' ' \" \"",
+ []
+ )
+ ]
+)
+def test_format_tags(inp, expected):
+ assert filters.format_tags(inp) == expected
| Make tag display consistent
![tags-alt](https://cloud.githubusercontent.com/assets/3323703/11621054/3eb9941c-9cb0-11e5-8119-150e0a0648c0.png)
![tags](https://cloud.githubusercontent.com/assets/3323703/11621055/3edf45fe-9cb0-11e5-8e4e-f5c8973c44f9.png)
We need to make the output of tags consistent. Each tag should be followed by a comma and a space, i.e. `tag1, tag2, tag3`.
| This will require some massaging of the data. Currently the tags field is just a raw text box that people can put whatever they want into. Some people do `tag,tag,tag` others do `tag, tag, tag` others do `tag tag tag` and I'm sure there are other variations as well.
How about:
1. Split the tags up with `/[, ]+/g`.
2. Split the tag icon and text into two separate columns, like:
``` html
<div class="tags">
<div class="icon">TAG_ICON</div>
<ul>
<li>TAG_1</li>
<li>TAG_2</li>
</ul>
</div>
```
?
@shiroyuki Some tag fields have multiple-word tags separated by commas, e.g. `tagA,tagB,another tag`, this would need to be taken into consideration, as this should produce `['tagA', 'tagB', 'another tag']` and not `['tagA', 'tagB', 'another', 'tag']`
So... this means the splitter function has to be:
``` javascript
function splitTags(tagText) {
var tags = [];
return tagText.match(/,/)
? tagText.split(/\s*,\s*/g) // split with comma surrounding by any whitespaces
: tagText.split(/\s+/g) // split with whitespace
;
}
```
@shiroyuki I found a few more example cases that we may want to be able to handle as well:
- Double-quoted, separated with commas: `"Record","speaking","Toefl","Rehearse","Educational","pickledb","Tkinter"`
- Double-quoted, separated with commas and spaces:
`"consul", "backup", "yml", "key-value", "boto", "s3", "s3bucket", "bucket", "cli", "command"`
- Separated with semicolons and spaces:
`natural language processing; thesaurus`
So probably:
- Split on comma or semicolon if present, if not, spaces;
- Strip whitespace, single quotes and double quotes from the split elements.
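A minimal Python sketch of that rule (and essentially what the `format_tags` filter in the diff above ends up doing):

``` python
import re


def format_tags(tags):
    # Prefer commas, then semicolons, then plain whitespace as the separator.
    if "," in tags:
        parts = re.split(r"\s*,\s*", tags)
    elif ";" in tags:
        parts = re.split(r"\s*;\s*", tags)
    else:
        parts = re.split(r"\s+", tags)
    # Strip surrounding whitespace/quotes, then drop anything left empty.
    parts = [re.sub(r'^["\'\s]+|["\'\s]+$', "", part) for part in parts]
    return [part for part in parts if part]
```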
That sounds about right. Wow, I'm impressed. :D
On the HTML side, I'd prefer if we kept the existing tag structure and simply replace the content, returning a comma separated string.
@nlhkabu IMO, the only reason to wrap each tag in an HTML tag is so that we can control the white space. For example, a tag like
```
something this long
```
should not end up wrapping as
```
something <next line>
this long
```
In this case, each tag should be wrapped in a span with `display: inline-block` applied.
``` javascript
function stripTag(tag) {
return tag.replace(/^["'\s]+|["'\s]+$/g, '');
}
function splitTags(tagText) {
var tags = [];
return tagText.match(/,/)
? tagText.split(/\s*,\s*/g) // split with comma surrounding by any whitespaces
: tagText.split(/\s+/g) // split with whitespace
;
}
function formatTags(tagText) {
return $.map(splitTags(tagText), stripTag);
}
```
Does this look like the right sort of thing? Or should the server split the tags (e.g. with a jinja2 filter, which could be called `format_tags`) then render them in the template? Something like:
``` html+jinja
{% if release.keywords %}
<p class="tags"><i class="fa fa-tags"></i>
{% for keyword in release.keywords | format_tags %}
<span class='keyword'>{{ keyword }}</span>
{% endfor %}
</p>
{% endif %}
```
(disclaimer: this is my first comment on this project! so if I have been too unclear or done something obviously wrong please don't be afraid to tell me.)
I would prefer to have it handled inside of a jinja2 filter.
And no worries! You're doing fine :)
I think the Jinja2 approach is better. :D
| 2016-02-19T01:50:22Z | [] | [] |
pypi/warehouse | 990 | pypi__warehouse-990 | [
"390"
] | 8f6d06c400ea45b6c16c7e5b0b59cc62391b1560 | diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py
--- a/warehouse/i18n/__init__.py
+++ b/warehouse/i18n/__init__.py
@@ -31,6 +31,10 @@ def includeme(config):
"format_datetime",
"warehouse.i18n.filters:format_datetime",
)
+ filters.setdefault(
+ "format_rfc822_datetime",
+ "warehouse.i18n.filters:format_rfc822_datetime",
+ )
# Register our utility functions with Jinja2
jglobals = config.get_settings().setdefault("jinja2.globals", {})
diff --git a/warehouse/i18n/filters.py b/warehouse/i18n/filters.py
--- a/warehouse/i18n/filters.py
+++ b/warehouse/i18n/filters.py
@@ -11,6 +11,7 @@
# limitations under the License.
import babel.dates
+import email.utils
import jinja2
from pyramid.threadlocal import get_current_request
@@ -28,3 +29,8 @@ def format_datetime(ctx, *args, **kwargs):
request = ctx.get("request") or get_current_request()
kwargs.setdefault("locale", request.locale)
return babel.dates.format_datetime(*args, **kwargs)
+
+
+@jinja2.contextfilter
+def format_rfc822_datetime(ctx, dt, *args, **kwargs):
+ return email.utils.formatdate(dt.timestamp(), usegmt=True)
diff --git a/warehouse/legacy/action_routing.py b/warehouse/legacy/action_routing.py
--- a/warehouse/legacy/action_routing.py
+++ b/warehouse/legacy/action_routing.py
@@ -28,9 +28,25 @@ def add_pypi_action_route(config, name, action, **kwargs):
)
+def add_pypi_action_redirect(config, action, target, **kwargs):
+ custom_predicates = kwargs.pop("custom_predicates", [])
+ custom_predicates += [pypi_action(action)]
+
+ config.add_redirect(
+ "/pypi", target,
+ custom_predicates=custom_predicates,
+ **kwargs
+ )
+
+
def includeme(config):
config.add_directive(
"add_pypi_action_route",
add_pypi_action_route,
action_wrap=False,
)
+ config.add_directive(
+ "add_pypi_action_redirect",
+ add_pypi_action_redirect,
+ action_wrap=False,
+ )
diff --git a/warehouse/redirects.py b/warehouse/redirects.py
--- a/warehouse/redirects.py
+++ b/warehouse/redirects.py
@@ -13,16 +13,16 @@
from pyramid.httpexceptions import HTTPMovedPermanently
-def redirect_view_factory(target, redirect=HTTPMovedPermanently):
+def redirect_view_factory(target, redirect=HTTPMovedPermanently, **kw):
def redirect_view(request):
return redirect(target.format(_request=request, **request.matchdict))
return redirect_view
def add_redirect(config, source, target, **kw):
- route_name = "warehouse.redirects." + source
+ route_name = "warehouse.redirects." + source + str(kw)
- config.add_route(route_name, source)
+ config.add_route(route_name, source, **kw)
config.add_view(redirect_view_factory(target, **kw), route_name=route_name)
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -60,6 +60,10 @@ def includeme(config):
)
config.add_route("packaging.file", "/packages/{path:.*}", read_only=True)
+ # RSS
+ config.add_route("rss.updates", "/rss/updates.xml", read_only=True)
+ config.add_route("rss.packages", "/rss/packages.xml", read_only=True)
+
# Legacy URLs
config.add_route("legacy.api.simple.index", "/simple/", read_only=True)
config.add_route(
@@ -111,3 +115,7 @@ def includeme(config):
"/pypi/{name}/{version}/",
"/project/{name}/{version}/",
)
+
+ # Legacy Action Redirects
+ config.add_pypi_action_redirect("rss", "/rss/updates.xml")
+ config.add_pypi_action_redirect("packages_rss", "/rss/packages.xml")
diff --git a/warehouse/rss/__init__.py b/warehouse/rss/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/rss/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/warehouse/rss/views.py b/warehouse/rss/views.py
new file mode 100644
--- /dev/null
+++ b/warehouse/rss/views.py
@@ -0,0 +1,68 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pyramid.view import view_config
+from sqlalchemy.orm import joinedload
+
+from warehouse.cache.origin import origin_cache
+from warehouse.packaging.models import Project, Release
+
+
+@view_config(
+ route_name="rss.updates",
+ renderer="rss/updates.xml",
+ decorator=[
+ origin_cache(
+ 1 * 24 * 60 * 60, # 1 day
+ stale_while_revalidate=1 * 24 * 60 * 60, # 1 day
+ stale_if_error=5 * 24 * 60 * 60, # 5 days
+ ),
+ ],
+)
+def rss_updates(request):
+ request.response.content_type = "text/xml"
+
+ latest_releases = (
+ request.db.query(Release)
+ .options(joinedload(Release.project))
+ .order_by(Release.created.desc())
+ .limit(40)
+ .all()
+ )
+
+ return {"latest_releases": latest_releases}
+
+
+@view_config(
+ route_name="rss.packages",
+ renderer="rss/packages.xml",
+ decorator=[
+ origin_cache(
+ 1 * 24 * 60 * 60, # 1 day
+ stale_while_revalidate=1 * 24 * 60 * 60, # 1 day
+ stale_if_error=5 * 24 * 60 * 60, # 5 days
+ ),
+ ],
+)
+def rss_packages(request):
+ request.response.content_type = "text/xml"
+
+ newest_projects = (
+ request.db.query(Project)
+ .options(joinedload(Project.releases))
+ .order_by(Project.created.desc())
+ .filter(Project.releases.any())
+ .limit(40)
+ .all()
+ )
+
+ return {"newest_projects": newest_projects}
| diff --git a/tests/unit/i18n/test_filters.py b/tests/unit/i18n/test_filters.py
--- a/tests/unit/i18n/test_filters.py
+++ b/tests/unit/i18n/test_filters.py
@@ -11,6 +11,7 @@
# limitations under the License.
import babel.dates
+import email.utils
import pretend
from warehouse.i18n import filters
@@ -48,3 +49,17 @@ def test_format_datetime(monkeypatch):
kwargs.update({"locale": request.locale})
assert format_datetime.calls == [pretend.call(*args, **kwargs)]
+
+
+def test_format_rfc822_datetime(monkeypatch):
+ formatted = pretend.stub()
+ formatdate = pretend.call_recorder(lambda *a, **kw: formatted)
+ monkeypatch.setattr(email.utils, "formatdate", formatdate)
+
+ ctx = pretend.stub()
+ timestamp = pretend.stub()
+ args = [pretend.stub(timestamp=lambda: timestamp), pretend.stub()]
+ kwargs = {"foo": pretend.stub()}
+
+ assert filters.format_rfc822_datetime(ctx, *args, **kwargs) is formatted
+ assert formatdate.calls == [pretend.call(timestamp, usegmt=True)]
diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py
--- a/tests/unit/i18n/test_init.py
+++ b/tests/unit/i18n/test_init.py
@@ -43,6 +43,8 @@ def test_includeme():
"jinja2.filters": {
"format_date": "warehouse.i18n.filters:format_date",
"format_datetime": "warehouse.i18n.filters:format_datetime",
+ "format_rfc822_datetime":
+ "warehouse.i18n.filters:format_rfc822_datetime",
},
"jinja2.globals": {
"l20n": "warehouse.i18n.l20n:l20n",
diff --git a/tests/unit/legacy/test_action_routing.py b/tests/unit/legacy/test_action_routing.py
--- a/tests/unit/legacy/test_action_routing.py
+++ b/tests/unit/legacy/test_action_routing.py
@@ -58,4 +58,9 @@ def test_includeme():
action_routing.add_pypi_action_route,
action_wrap=False,
),
+ pretend.call(
+ "add_pypi_action_redirect",
+ action_routing.add_pypi_action_redirect,
+ action_wrap=False,
+ ),
]
diff --git a/tests/unit/rss/__init__.py b/tests/unit/rss/__init__.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/rss/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/rss/test_views.py b/tests/unit/rss/test_views.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/rss/test_views.py
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+from warehouse.rss import views as rss
+from ...common.db.packaging import ProjectFactory, ReleaseFactory
+
+
+def test_rss_updates(db_request):
+ project1 = ProjectFactory.create()
+ project2 = ProjectFactory.create()
+
+ release1 = ReleaseFactory.create(project=project1)
+ release1.created = datetime.date(2011, 1, 1)
+ release2 = ReleaseFactory.create(project=project2)
+ release2.created = datetime.date(2012, 1, 1)
+ release3 = ReleaseFactory.create(project=project1)
+ release3.created = datetime.date(2013, 1, 1)
+
+ assert rss.rss_updates(db_request) == {
+ "latest_releases": [release3, release2, release1],
+ }
+ assert db_request.response.content_type == "text/xml"
+
+
+def test_rss_packages(db_request):
+ project1 = ProjectFactory.create()
+ project1.created = datetime.date(2011, 1, 1)
+ ReleaseFactory.create(project=project1)
+
+ project2 = ProjectFactory.create()
+ project2.created = datetime.date(2012, 1, 1)
+
+ project3 = ProjectFactory.create()
+ project3.created = datetime.date(2013, 1, 1)
+ ReleaseFactory.create(project=project3)
+
+ assert rss.rss_packages(db_request) == {
+ "newest_projects": [project3, project1],
+ }
+ assert db_request.response.content_type == "text/xml"
diff --git a/tests/unit/test_redirects.py b/tests/unit/test_redirects.py
--- a/tests/unit/test_redirects.py
+++ b/tests/unit/test_redirects.py
@@ -34,21 +34,28 @@ def test_add_redirect(monkeypatch):
monkeypatch.setattr(redirects, "redirect_view_factory", rview_factory)
config = pretend.stub(
- add_route=pretend.call_recorder(lambda name, route: None),
+ add_route=pretend.call_recorder(lambda name, route, **kw: None),
add_view=pretend.call_recorder(lambda view, route_name: None),
)
source = "/the/{thing}/"
target = "/other/{thing}/"
redirect = pretend.stub()
+ kwargs = {
+ 'redirect': redirect,
+ }
- redirects.add_redirect(config, source, target, redirect=redirect)
+ redirects.add_redirect(config, source, target, **kwargs)
assert config.add_route.calls == [
- pretend.call("warehouse.redirects." + source, source),
+ pretend.call(
+ "warehouse.redirects." + source + str(kwargs), source, **kwargs
+ ),
]
assert config.add_view.calls == [
- pretend.call(rview, route_name="warehouse.redirects." + source),
+ pretend.call(
+ rview, route_name="warehouse.redirects." + source + str(kwargs)
+ ),
]
assert rview_factory.calls == [pretend.call(target, redirect=redirect)]
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -38,6 +38,11 @@ def add_redirect(*args, **kwargs):
def add_pypi_action_route(name, action, **kwargs):
pass
+ @staticmethod
+ @pretend.call_recorder
+ def add_pypi_action_redirect(action, target, **kwargs):
+ pass
+
@staticmethod
@pretend.call_recorder
def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
@@ -85,6 +90,8 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
read_only=True,
),
pretend.call("packaging.file", "/packages/{path:.*}", read_only=True),
+ pretend.call("rss.updates", "/rss/updates.xml", read_only=True),
+ pretend.call("rss.packages", "/rss/packages.xml", read_only=True),
pretend.call("legacy.api.simple.index", "/simple/", read_only=True),
pretend.call(
"legacy.api.simple.detail",
@@ -123,6 +130,11 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
pretend.call("legacy.api.pypi.doap", "doap"),
]
+ assert config.add_pypi_action_redirect.calls == [
+ pretend.call("rss", "/rss/updates.xml"),
+ pretend.call("packages_rss", "/rss/packages.xml"),
+ ]
+
assert config.add_xmlrpc_endpoint.calls == [
pretend.call(
"pypi",
| Implement Legacy RSS Feeds
Determine if we need/still want the legacy RSS feeds and if so implement them (both "last hour" and "packages").
| 2016-02-29T22:22:21Z | [] | [] |
|
pypi/warehouse | 1,020 | pypi__warehouse-1020 | [
"1019"
] | 048ab9cafb89bd0d6e82399eb0aea65e738ba650 | diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -159,7 +159,7 @@ def search(request):
"multi_match",
query=request.params["q"],
fields=[
- "name", "version", "author", "author_email", "maintainer",
+ "name^2", "version", "author", "author_email", "maintainer",
"maintainer_email", "home_page", "license", "summary",
"description", "keywords", "platform", "download_url",
],
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -133,9 +133,10 @@ def test_with_a_query(self, monkeypatch, page):
"multi_match",
query="foo bar",
fields=[
- "name", "version", "author", "author_email", "maintainer",
- "maintainer_email", "home_page", "license", "summary",
- "description", "keywords", "platform", "download_url",
+ "name^2", "version", "author", "author_email",
+ "maintainer", "maintainer_email", "home_page", "license",
+ "summary", "description", "keywords", "platform",
+ "download_url",
],
),
]
@@ -188,9 +189,10 @@ def test_with_an_ordering(self, monkeypatch, page):
"multi_match",
query="foo bar",
fields=[
- "name", "version", "author", "author_email", "maintainer",
- "maintainer_email", "home_page", "license", "summary",
- "description", "keywords", "platform", "download_url",
+ "name^2", "version", "author", "author_email",
+ "maintainer", "maintainer_email", "home_page", "license",
+ "summary", "description", "keywords", "platform",
+ "download_url",
],
),
]
| Search results seem to need some relevancy tweaking
Searches seem to have some relevancy issues. For example:
![2016-03-17 at 11 13 am 1](https://cloud.githubusercontent.com/assets/21148/13856330/e99facd8-ec31-11e5-81a8-ff1033f1df67.png)
Or: https://warehouse.python.org/search/?q=django&page=1 - Django itself doesn't seem to appear in the first half-dozen or so pages (I gave up paging before I found it).
Jacob
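The fix that landed (see the diff above) is a relevancy boost on the project name; in elasticsearch-dsl a per-field boost is just a caret suffix on the field name:

``` python
request.es.query(
    "multi_match",
    query="django",
    # "name^2" weights matches on the project name twice as heavily as
    # matches in the remaining fields.
    fields=["name^2", "summary", "description", "keywords"],
)
```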
| 2016-03-17T18:54:48Z | [] | [] |
|
pypi/warehouse | 1,021 | pypi__warehouse-1021 | [
"1012"
] | 048ab9cafb89bd0d6e82399eb0aea65e738ba650 | diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py
--- a/warehouse/accounts/forms.py
+++ b/warehouse/accounts/forms.py
@@ -9,14 +9,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import re
import wtforms
+import wtforms.fields.html5
-from warehouse import forms
+from warehouse import forms, recaptcha
-class LoginForm(forms.Form):
-
+class CredentialsMixin:
username = wtforms.StringField(
validators=[
wtforms.validators.DataRequired(),
@@ -30,19 +31,89 @@ class LoginForm(forms.Form):
],
)
- def __init__(self, *args, login_service, **kwargs):
+ def __init__(self, *args, user_service, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.user_service = user_service
+
+
+# XXX: This is a naive password strength validator, but something that can
+# easily be replicated in JS for client-side feedback.
+# see: https://github.com/pypa/warehouse/issues/6
+PWD_MIN_LEN = 8
+PWD_RE = re.compile(r"""
+^ # start
+(?=.*[A-Z]+.*) # >= 1 upper case
+(?=.*[a-z]+.*) # >= 1 lower case
+(?=.*[0-9]+.*) # >= 1 number
+(?=.*[.*~`\!@#$%^&\*\(\)_+-={}|\[\]\\:";'<>?,\./]+.*) # >= 1 special char
+.{""" + str(PWD_MIN_LEN) + """,} # >= 8 chars
+$ # end
+""", re.X)
+
+
+class RegistrationForm(CredentialsMixin, forms.Form):
+ password_confirm = wtforms.PasswordField(
+ validators=[
+ wtforms.validators.DataRequired(),
+ wtforms.validators.EqualTo(
+ "password", "Passwords must match."
+ ),
+ ],
+ )
+
+ full_name = wtforms.StringField()
+
+ email = wtforms.fields.html5.EmailField(
+ validators=[
+ wtforms.validators.DataRequired(),
+ wtforms.validators.Email(),
+ ],
+ )
+
+ g_recaptcha_response = wtforms.StringField()
+
+ def __init__(self, *args, recaptcha_service, **kwargs):
super().__init__(*args, **kwargs)
+ self.recaptcha_service = recaptcha_service
+
+ def validate_username(self, field):
+ if self.user_service.find_userid(field.data) is not None:
+ raise wtforms.validators.ValidationError(
+ "Username exists.")
+
+ def validate_email(self, field):
+ if self.user_service.find_userid_by_email(field.data) is not None:
+ raise wtforms.validators.ValidationError("Email exists.")
+
+ def validate_g_recaptcha_response(self, field):
+ # do required data validation here due to enabled flag being required
+ if self.recaptcha_service.enabled and not field.data:
+ raise wtforms.validators.ValidationError("Recaptcha error.")
+ try:
+ self.recaptcha_service.verify_response(field.data)
+ except recaptcha.RecaptchaError:
+ # TODO: log error
+ # don't want to provide the user with any detail
+ raise wtforms.validators.ValidationError("Recaptcha error.")
+
+ def validate_password(self, field):
+ if not PWD_RE.match(field.data):
+ raise wtforms.validators.ValidationError(
+ "Password must contain an upper case letter, a lower case "
+ "letter, a number, a special character and be at least "
+ "%d characters in length" % PWD_MIN_LEN
+ )
- self.login_service = login_service
+class LoginForm(CredentialsMixin, forms.Form):
def validate_username(self, field):
- userid = self.login_service.find_userid(field.data)
+ userid = self.user_service.find_userid(field.data)
if userid is None:
raise wtforms.validators.ValidationError("Invalid user.")
def validate_password(self, field):
- userid = self.login_service.find_userid(self.username.data)
+ userid = self.user_service.find_userid(self.username.data)
if userid is not None:
- if not self.login_service.check_password(userid, field.data):
+ if not self.user_service.check_password(userid, field.data):
raise wtforms.validators.ValidationError("Invalid password.")
diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py
--- a/warehouse/accounts/services.py
+++ b/warehouse/accounts/services.py
@@ -56,6 +56,20 @@ def find_userid(self, username):
return user.id
+ @functools.lru_cache()
+ def find_userid_by_email(self, email):
+ try:
+ # flake8: noqa
+ user_id = (
+ self.db.query(Email.user_id)
+ .filter(Email.email == email)
+ .one()
+ )[0]
+ except NoResultFound:
+ return
+
+ return user_id
+
def check_password(self, userid, password):
user = self.get_user(userid)
if user is None:
@@ -78,9 +92,10 @@ def check_password(self, userid, password):
def create_user(self, username, name, password, email,
is_active=False, is_staff=False, is_superuser=False):
+
user = User(username=username,
name=name,
- password=password,
+ password=self.hasher.encrypt(password),
is_active=is_active,
is_staff=is_staff,
is_superuser=is_superuser)
@@ -89,6 +104,8 @@ def create_user(self, username, name, password, email,
email_object = Email(email=email, user=user,
primary=True, verified=False)
self.db.add(email_object)
+ # flush the db now so user.id is available
+ self.db.flush()
return user
def update_user(self, user_id, **changes):
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -16,7 +16,7 @@
from sqlalchemy.orm import joinedload
from warehouse.accounts import REDIRECT_FIELD_NAME
-from warehouse.accounts.forms import LoginForm
+from warehouse.accounts import forms
from warehouse.accounts.interfaces import IUserService
from warehouse.cache.origin import origin_cache
from warehouse.csrf import csrf_protect
@@ -61,54 +61,22 @@ def profile(user, request):
decorator=[csrf_protect("accounts.login"), uses_session],
)
def login(request, redirect_field_name=REDIRECT_FIELD_NAME,
- _form_class=LoginForm):
+ _form_class=forms.LoginForm):
# TODO: Logging in should reset request.user
# TODO: Configure the login view as the default view for not having
# permission to view something.
- login_service = request.find_service(IUserService, context=None)
+ user_service = request.find_service(IUserService, context=None)
redirect_to = request.POST.get(redirect_field_name,
request.GET.get(redirect_field_name))
- form = _form_class(request.POST, login_service=login_service)
+ form = _form_class(request.POST, user_service=user_service)
if request.method == "POST" and form.validate():
# Get the user id for the given username.
username = form.username.data
- userid = login_service.find_userid(username)
-
- # We have a session factory associated with this request, so in order
- # to protect against session fixation attacks we're going to make sure
- # that we create a new session (which for sessions with an identifier
- # will cause it to get a new session identifier).
-
- # We need to protect against session fixation attacks, so make sure
- # that we create a new session (which will cause it to get a new
- # session identifier).
- if (request.unauthenticated_userid is not None and
- request.unauthenticated_userid != userid):
- # There is already a userid associated with this request and it is
- # a different userid than the one we're trying to remember now. In
- # this case we want to drop the existing session completely because
- # we don't want to leak any data between authenticated userids.
- request.session.invalidate()
- else:
- # We either do not have an associated userid with this request
- # already, or the userid is the same one we're trying to remember
- # now. In either case we want to keep all of the data but we want
- # to make sure that we create a new session since we're crossing
- # a privilege boundary.
- data = dict(request.session.items())
- request.session.invalidate()
- request.session.update(data)
-
- # Remember the userid using the authentication policy.
- headers = remember(request, userid)
-
- # Cycle the CSRF token since we've crossed an authentication boundary
- # and we don't want to continue using the old one.
- request.session.new_csrf_token()
+ userid = user_service.find_userid(username)
# If the user-originating redirection url is not safe, then redirect to
# the index instead.
@@ -116,6 +84,7 @@ def login(request, redirect_field_name=REDIRECT_FIELD_NAME,
not is_safe_url(url=redirect_to, host=request.host)):
redirect_to = "/"
+ headers = _login_user(request, userid)
# Now that we're logged in we'll want to redirect the user to either
# where they were trying to go originally, or to the default view.
return HTTPSeeOther(redirect_to, headers=dict(headers))
@@ -169,3 +138,77 @@ def logout(request, redirect_field_name=REDIRECT_FIELD_NAME):
return HTTPSeeOther(redirect_to, headers=dict(headers))
return {"redirect": {"field": REDIRECT_FIELD_NAME, "data": redirect_to}}
+
+
+@view_config(
+ route_name="accounts.register",
+ renderer="accounts/register.html",
+ decorator=[csrf_protect("accounts.register"), uses_session],
+)
+def register(request, _form_class=forms.RegistrationForm):
+ if request.authenticated_userid is not None:
+ return HTTPSeeOther("/")
+
+ user_service = request.find_service(IUserService, context=None)
+ recaptcha_service = request.find_service(name="recaptcha")
+ request.find_service(name="csp").merge(recaptcha_service.csp_policy)
+
+ # the form contains an auto-generated field from recaptcha with
+ # hyphens in it. make it play nice with wtforms.
+ post_body = {
+ key.replace("-", "_"): value
+ for key, value in request.POST.items()
+ }
+
+ form = _form_class(
+ data=post_body, user_service=user_service,
+ recaptcha_service=recaptcha_service
+ )
+
+ if request.method == "POST" and form.validate():
+ user = user_service.create_user(
+ form.username.data, form.full_name.data, form.password.data,
+ form.email.data
+ )
+
+ return HTTPSeeOther(
+ request.route_path("index"),
+ headers=dict(_login_user(request, user.id)))
+
+ return {"form": form}
+
+
+def _login_user(request, userid):
+ # We have a session factory associated with this request, so in order
+ # to protect against session fixation attacks we're going to make sure
+ # that we create a new session (which for sessions with an identifier
+ # will cause it to get a new session identifier).
+
+ # We need to protect against session fixation attacks, so make sure
+ # that we create a new session (which will cause it to get a new
+ # session identifier).
+ if (request.unauthenticated_userid is not None and
+ request.unauthenticated_userid != userid):
+ # There is already a userid associated with this request and it is
+ # a different userid than the one we're trying to remember now. In
+ # this case we want to drop the existing session completely because
+ # we don't want to leak any data between authenticated userids.
+ request.session.invalidate()
+ else:
+ # We either do not have an associated userid with this request
+ # already, or the userid is the same one we're trying to remember
+ # now. In either case we want to keep all of the data but we want
+ # to make sure that we create a new session since we're crossing
+ # a privilege boundary.
+ data = dict(request.session.items())
+ request.session.invalidate()
+ request.session.update(data)
+
+ # Remember the userid using the authentication policy.
+ headers = remember(request, userid)
+
+ # Cycle the CSRF token since we've crossed an authentication boundary
+ # and we don't want to continue using the old one.
+ request.session.new_csrf_token()
+
+ return headers
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -300,8 +300,6 @@ def configure(settings=None):
# Register all our URL routes for Warehouse.
config.include(".routes")
- config.include(".csp")
-
# Block non HTTPS requests for the legacy ?:action= routes when they are
# sent via POST.
config.add_tween("warehouse.config.require_https_tween_factory")
@@ -353,6 +351,19 @@ def configure(settings=None):
# most WSGI middleware.
config.include(".raven")
+ # Register Content-Security-Policy service
+ config.include(".csp")
+
+ # Register recaptcha service
+ config.include(".recaptcha")
+
+ config.add_settings({
+ "http": {
+ "verify": "/etc/ssl/certs/",
+ },
+ })
+ config.include(".http")
+
# Add our theme if one was configured
if config.get_settings().get("warehouse.theme"):
config.include(config.get_settings()["warehouse.theme"])
diff --git a/warehouse/csp.py b/warehouse/csp.py
--- a/warehouse/csp.py
+++ b/warehouse/csp.py
@@ -31,8 +31,20 @@ def content_security_policy_tween(request):
return content_security_policy_tween
+class CSPPolicy(collections.defaultdict):
+ def __init__(self, policy=None):
+ super().__init__(list, policy or {})
+
+ def merge(self, policy):
+ for key, attrs in policy.items():
+ self[key].extend(attrs)
+
+
def csp_factory(_, request):
- return copy.deepcopy(request.registry.settings.get("csp", {}))
+ try:
+ return CSPPolicy(copy.deepcopy(request.registry.settings["csp"]))
+ except KeyError:
+ return CSPPolicy({})
def includeme(config):
diff --git a/warehouse/http.py b/warehouse/http.py
new file mode 100644
--- /dev/null
+++ b/warehouse/http.py
@@ -0,0 +1,32 @@
+import threading
+import requests
+
+
+class ThreadLocalSessionFactory:
+ def __init__(self, config=None):
+ self.config = config
+ self._local = threading.local()
+
+ def __call__(self, request):
+ try:
+ session = self._local.session
+ request.log.debug("reusing existing session")
+ return session
+ except AttributeError:
+ request.log.debug("creating new session")
+ session = requests.Session()
+
+ if self.config is not None:
+ for attr, val in self.config.items():
+ assert hasattr(session, attr)
+ setattr(session, attr, val)
+
+ self._local.session = session
+ return session
+
+
+def includeme(config):
+ config.add_request_method(
+ ThreadLocalSessionFactory(config.registry.settings.get("http")),
+ name="http", reify=True
+ )
diff --git a/warehouse/logging.py b/warehouse/logging.py
--- a/warehouse/logging.py
+++ b/warehouse/logging.py
@@ -11,6 +11,7 @@
# limitations under the License.
import logging.config
+import threading
import uuid
import structlog
@@ -35,6 +36,7 @@ def format(self, record):
"logger": record.name,
"level": record.levelname,
"event": record.msg,
+ "thread": threading.get_ident(),
}
record.msg = RENDERER(None, None, event_dict)
diff --git a/warehouse/recaptcha.py b/warehouse/recaptcha.py
new file mode 100644
--- /dev/null
+++ b/warehouse/recaptcha.py
@@ -0,0 +1,142 @@
+import collections
+import http
+from os import environ
+from urllib.parse import urlencode
+
+
+VERIFY_URL = "https://www.google.com/recaptcha/api/siteverify"
+
+
+# flake8: noqa
+class RecaptchaError(ValueError): pass
+class MissingInputSecretError(RecaptchaError): pass
+class InvalidInputSecretError(RecaptchaError): pass
+class MissingInputResponseError(RecaptchaError): pass
+class InvalidInputResponseError(RecaptchaError): pass
+class UnexpectedError(RecaptchaError): pass
+
+ERROR_CODE_MAP = {
+ "missing-input-secret": MissingInputSecretError,
+ "invalid-input-secret": InvalidInputSecretError,
+ "missing-input-response": MissingInputResponseError,
+ "invalid-input-response": InvalidInputResponseError,
+}
+
+ChallengeResponse = collections.namedtuple(
+ "ChallengeResponse", ("challenge_ts", "hostname")
+)
+
+
+class Service:
+ def __init__(self, request):
+ self.request = request
+
+ @property
+ def csp_policy(self):
+ # the use of request.scheme should ever only be for dev. problem is
+ # that we use "//" in the script tags, so the request scheme is used.
+ # because the csp has to match the script src scheme, it also has to
+ # be dynamic.
+ return {
+ "script-src": [
+ "%s://www.google.com/recaptcha/" % self.request.scheme,
+ "%s://www.gstatic.com/recaptcha/" % self.request.scheme,
+ ],
+ "frame-src": [
+ "%s://www.google.com/recaptcha/" % self.request.scheme,
+ ],
+ "style-src": [
+ "'unsafe-inline'",
+ ],
+ }
+
+ @property
+ def enabled(self):
+ settings = self.request.registry.settings.get("recaptcha", {})
+ return len(settings.get("site_key", "")) > 0 and len(
+ settings.get("secret_key", "")) > 0
+
+ def verify_response(self, response, remote_ip=None):
+ if not self.enabled:
+ # TODO: debug logging
+ return
+
+ settings = self.request.registry.settings["recaptcha"]
+
+ payload = {
+ "secret": settings["secret_key"],
+ "response": response,
+ }
+ if remote_ip is not None:
+ payload["remoteip"] = remote_ip
+
+ try:
+ # TODO: the timeout is hardcoded for now. it would be nice to do
+ # something a little more generalized in the future.
+ resp = self.request.http.post(
+ VERIFY_URL, urlencode(payload),
+ headers={"Content-Type":
+ "application/x-www-form-urlencoded; charset=utf-8"},
+ timeout=10
+ )
+ except Exception as err:
+ raise UnexpectedError(str(err))
+
+ try:
+ data = resp.json()
+ except ValueError:
+ raise UnexpectedError(
+ "Unexpected data in response body: %s" % str(
+ resp.content, 'utf-8')
+ )
+
+ try:
+ success = data["success"]
+ except KeyError:
+ raise UnexpectedError(
+ "Missing 'success' key in response: %s" % data
+ )
+
+ if resp.status_code != http.HTTPStatus.OK or not data["success"]:
+ try:
+ error_codes = data["error_codes"]
+ except KeyError:
+ raise UnexpectedError(
+ "Response missing 'error-codes' key: %s" % data
+ )
+ try:
+ exc_tp = ERROR_CODE_MAP[error_codes[0]]
+ except KeyError:
+ raise UnexpectedError(
+ "Unexpected error code: %s" % error_codes[0]
+ )
+ raise exc_tp
+
+ # challenge_ts = timestamp of the challenge load
+ # (ISO format yyyy-MM-dd'T'HH:mm:ssZZ)
+ # TODO: maybe run some validation against the hostname and timestamp?
+ # TODO: log if either field is empty.. it shouldn't cause a failure,
+ # but it likely means that google has changed their response structure
+ return ChallengeResponse(
+ data.get("challenge_ts"),
+ data.get("hostname"),
+ )
+
+
+def service_factory(handler, request):
+ return Service(request)
+
+
+def includeme(config):
+ # yeah yeah, binding to a concrete implementation rather than an
+ # interface. in a perfect world, this will never be offloaded to another
+ # service. however, if it is, then we'll deal with the refactor then
+ config.register_service_factory(service_factory, name="recaptcha")
+
+ # key-less recaptcha config will work on localhost, but not prod
+ config.add_settings({
+ "recaptcha": {
+ "site_key": environ.get("RECAPTCHA_SITE_KEY"),
+ "secret_key": environ.get("RECAPTCHA_SECRET_KEY"),
+ },
+ })
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -42,6 +42,7 @@ def includeme(config):
)
config.add_route("accounts.login", "/account/login/")
config.add_route("accounts.logout", "/account/logout/")
+ config.add_route("accounts.register", "/account/register/")
# Packaging
config.add_route(
| diff --git a/requirements/tests.txt b/requirements/tests.txt
--- a/requirements/tests.txt
+++ b/requirements/tests.txt
@@ -4,4 +4,5 @@ freezegun
pretend
pytest
pytest-dbfixtures>=0.9.0
+responses>=0.5.1
webtest
diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py
--- a/tests/unit/accounts/test_forms.py
+++ b/tests/unit/accounts/test_forms.py
@@ -14,87 +14,264 @@
import pytest
import wtforms
-from warehouse.accounts.forms import LoginForm
+from warehouse.accounts import forms
+from warehouse import recaptcha
class TestLoginForm:
def test_creation(self):
- login_service = pretend.stub()
- form = LoginForm(login_service=login_service)
+ user_service = pretend.stub()
+ form = forms.LoginForm(user_service=user_service)
- assert form.login_service is login_service
+ assert form.user_service is user_service
def test_validate_username_with_no_user(self):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda userid: None),
)
- form = LoginForm(login_service=login_service)
+ form = forms.LoginForm(user_service=user_service)
field = pretend.stub(data="my_username")
with pytest.raises(wtforms.validators.ValidationError):
form.validate_username(field)
- assert login_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
def test_validate_username_with_user(self):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda userid: 1),
)
- form = LoginForm(login_service=login_service)
+ form = forms.LoginForm(user_service=user_service)
field = pretend.stub(data="my_username")
form.validate_username(field)
- assert login_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
def test_validate_password_no_user(self):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda userid: None),
)
- form = LoginForm(
+ form = forms.LoginForm(
data={"username": "my_username"},
- login_service=login_service,
+ user_service=user_service,
)
field = pretend.stub(data="password")
form.validate_password(field)
- assert login_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
def test_validate_password_ok(self):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda userid: 1),
check_password=pretend.call_recorder(
lambda userid, password: True
),
)
- form = LoginForm(
+ form = forms.LoginForm(
data={"username": "my_username"},
- login_service=login_service,
+ user_service=user_service,
)
field = pretend.stub(data="pw")
form.validate_password(field)
- assert login_service.find_userid.calls == [pretend.call("my_username")]
- assert login_service.check_password.calls == [pretend.call(1, "pw")]
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.check_password.calls == [pretend.call(1, "pw")]
def test_validate_password_notok(self, db_session):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda userid: 1),
check_password=pretend.call_recorder(
lambda userid, password: False
),
)
- form = LoginForm(
+ form = forms.LoginForm(
data={"username": "my_username"},
- login_service=login_service,
+ user_service=user_service,
)
field = pretend.stub(data="pw")
with pytest.raises(wtforms.validators.ValidationError):
form.validate_password(field)
- assert login_service.find_userid.calls == [pretend.call("my_username")]
- assert login_service.check_password.calls == [pretend.call(1, "pw")]
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.check_password.calls == [pretend.call(1, "pw")]
+
+
+class TestRegistrationForm:
+ def test_create(self):
+ user_service = pretend.stub()
+ recaptcha_service = pretend.stub(enabled=True)
+
+ form = forms.RegistrationForm(
+ data={}, user_service=user_service,
+ recaptcha_service=recaptcha_service
+ )
+ assert form.user_service is user_service
+ assert form.recaptcha_service is recaptcha_service
+
+ def test_password_confirm_required_error(self):
+ form = forms.RegistrationForm(
+ data={"password_confirm": ""},
+ user_service=pretend.stub(
+ find_userid_by_email=pretend.call_recorder(
+ lambda _: pretend.stub()
+ ),
+ ),
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.password_confirm.errors.pop() == "This field is required."
+
+ def test_passwords_mismatch_error(self):
+ user_service = pretend.stub(
+ find_userid_by_email=pretend.call_recorder(
+ lambda _: pretend.stub()
+ ),
+ )
+ form = forms.RegistrationForm(
+ data={
+ "password": "password",
+ "password_confirm": "mismatch",
+ },
+ user_service=user_service,
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.password_confirm.errors.pop() == "Passwords must match."
+
+ def test_passwords_match_success(self):
+ user_service = pretend.stub(
+ find_userid_by_email=pretend.call_recorder(
+ lambda _: pretend.stub()
+ ),
+ )
+ form = forms.RegistrationForm(
+ data={
+ "password": "MyStr0ng!shPassword",
+ "password_confirm": "MyStr0ng!shPassword",
+ },
+ user_service=user_service,
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ form.validate()
+ assert len(form.password.errors) == 0
+ assert len(form.password_confirm.errors) == 0
+
+ def test_email_required_error(self):
+ form = forms.RegistrationForm(
+ data={"email": ""},
+ user_service=pretend.stub(
+ find_userid_by_email=pretend.call_recorder(
+ lambda _: pretend.stub()
+ ),
+ ),
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.email.errors.pop() == "This field is required."
+
+ def test_invalid_email_error(self):
+ form = forms.RegistrationForm(
+ data={"email": "bad"},
+ user_service=pretend.stub(
+ find_userid_by_email=pretend.call_recorder(lambda _: None),
+ ),
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.email.errors.pop() == "Invalid email address."
+
+ def test_email_exists_error(self):
+ form = forms.RegistrationForm(
+ data={"email": "foo@bar.com"},
+ user_service=pretend.stub(
+ find_userid_by_email=pretend.call_recorder(
+ lambda _: pretend.stub()
+ ),
+ ),
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.email.errors.pop() == "Email exists."
+
+ def test_recaptcha_disabled(self):
+ form = forms.RegistrationForm(
+ data={"g_recpatcha_response": ""},
+ user_service=pretend.stub(),
+ recaptcha_service=pretend.stub(
+ enabled=False,
+ verify_response=pretend.call_recorder(lambda _: None),
+ ),
+ )
+ assert not form.validate()
+ # there shouldn't be any errors for the recaptcha field if it's
+ # disabled
+ assert not form.g_recaptcha_response.errors
+
+ def test_recaptcha_required_error(self):
+ form = forms.RegistrationForm(
+ data={"g_recaptcha_response": ""},
+ user_service=pretend.stub(),
+ recaptcha_service=pretend.stub(
+ enabled=True,
+ verify_response=pretend.call_recorder(lambda _: None),
+ ),
+ )
+ assert not form.validate()
+ assert form.g_recaptcha_response.errors.pop() \
+ == "Recaptcha error."
+
+ def test_recaptcha_error(self):
+ form = forms.RegistrationForm(
+ data={"g_recaptcha_response": "asd"},
+ user_service=pretend.stub(),
+ recaptcha_service=pretend.stub(
+ verify_response=pretend.raiser(recaptcha.RecaptchaError),
+ enabled=True,
+ ),
+ )
+ assert not form.validate()
+ assert form.g_recaptcha_response.errors.pop() \
+ == "Recaptcha error."
+
+ def test_username_exists(self):
+ form = forms.RegistrationForm(
+ data={"username": "foo"},
+ user_service=pretend.stub(
+ find_userid=pretend.call_recorder(lambda name: 1),
+ ),
+ recaptcha_service=pretend.stub(
+ enabled=False,
+ verify_response=pretend.call_recorder(lambda _: None),
+ ),
+ )
+ assert not form.validate()
+ assert form.username.errors.pop() == "Username exists."
+
+ def test_password_strength(self):
+ cases = (
+ ("foobar", False),
+ ("somethingalittlebetter9", False),
+ ("1aDeCent!1", True),
+ )
+ for pwd, valid in cases:
+ form = forms.RegistrationForm(
+ data={"password": pwd, "password_confirm": pwd},
+ user_service=pretend.stub(),
+ recaptcha_service=pretend.stub(
+ enabled=False,
+ verify_response=pretend.call_recorder(lambda _: None),
+ ),
+ )
+ form.validate()
+ assert (len(form.password.errors) == 0) == valid
diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py
--- a/tests/unit/accounts/test_services.py
+++ b/tests/unit/accounts/test_services.py
@@ -116,7 +116,6 @@ def test_create_user(self, db_session):
user_from_db = service.get_user(new_user.id)
assert user_from_db.username == user.username
assert user_from_db.name == user.name
- assert user_from_db.password == user.password
assert user_from_db.email == email
def test_update_user(self, db_session):
@@ -138,6 +137,37 @@ def test_verify_email(self, db_session):
assert user.emails[0].verified
assert not user.emails[1].verified
+ def test_find_by_email(self, db_session):
+ service = services.DatabaseUserService(db_session)
+ user = UserFactory.create()
+ EmailFactory.create(user=user, primary=True, verified=False)
+
+ found_userid = service.find_userid_by_email(user.emails[0].email)
+ db_session.flush()
+
+ assert user.id == found_userid
+
+ def test_find_by_email_not_found(self, db_session):
+ service = services.DatabaseUserService(db_session)
+ assert service.find_userid_by_email("something") is None
+
+ def test_create_login_success(self, db_session):
+ service = services.DatabaseUserService(db_session)
+ user = service.create_user(
+ "test_user", "test_name", "test_password", "test_email")
+
+ assert user.id is not None
+ # now make sure that we can log in as that user
+ assert service.check_password(user.id, "test_password")
+
+ def test_create_login_error(self, db_session):
+ service = services.DatabaseUserService(db_session)
+ user = service.create_user(
+ "test_user", "test_name", "test_password", "test_email")
+
+ assert user.id is not None
+ assert not service.check_password(user.id, "bad_password")
+
def test_database_login_factory(monkeypatch):
service_obj = pretend.stub()
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py
--- a/tests/unit/accounts/test_views.py
+++ b/tests/unit/accounts/test_views.py
@@ -56,12 +56,12 @@ class TestLogin:
@pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
def test_get_returns_form(self, pyramid_request, next_url):
- login_service = pretend.stub()
+ user_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
- lambda iface, context: login_service
+ lambda iface, context: user_service
)
form_obj = pretend.stub()
- form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+ form_class = pretend.call_recorder(lambda d, user_service: form_obj)
if next_url is not None:
pyramid_request.GET["next"] = next_url
@@ -76,20 +76,20 @@ def test_get_returns_form(self, pyramid_request, next_url):
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
- pretend.call(pyramid_request.POST, login_service=login_service),
+ pretend.call(pyramid_request.POST, user_service=user_service),
]
@pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
def test_post_invalid_returns_form(self, pyramid_request, next_url):
- login_service = pretend.stub()
+ user_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
- lambda iface, context: login_service
+ lambda iface, context: user_service
)
pyramid_request.method = "POST"
if next_url is not None:
pyramid_request.POST["next"] = next_url
form_obj = pretend.stub(validate=pretend.call_recorder(lambda: False))
- form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+ form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
@@ -101,7 +101,7 @@ def test_post_invalid_returns_form(self, pyramid_request, next_url):
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
- pretend.call(pyramid_request.POST, login_service=login_service),
+ pretend.call(pyramid_request.POST, user_service=user_service),
]
assert form_obj.validate.calls == [pretend.call()]
@@ -115,11 +115,11 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
new_session = {}
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda username: 1),
)
pyramid_request.find_service = pretend.call_recorder(
- lambda iface, context: login_service
+ lambda iface, context: user_service
)
pyramid_request.method = "POST"
pyramid_request.session = pretend.stub(
@@ -138,7 +138,7 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data="theuser"),
)
- form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+ form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
@@ -148,11 +148,11 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
assert result.headers["foo"] == "bar"
assert form_class.calls == [
- pretend.call(pyramid_request.POST, login_service=login_service),
+ pretend.call(pyramid_request.POST, user_service=user_service),
]
assert form_obj.validate.calls == [pretend.call()]
- assert login_service.find_userid.calls == [pretend.call("theuser")]
+ assert user_service.find_userid.calls == [pretend.call("theuser")]
if with_user:
assert new_session == {}
@@ -177,11 +177,11 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
)
def test_post_validate_no_redirects(self, pyramid_request,
expected_next_url, observed_next_url):
- login_service = pretend.stub(
+ user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda username: 1),
)
pyramid_request.find_service = pretend.call_recorder(
- lambda iface, context: login_service
+ lambda iface, context: user_service
)
pyramid_request.method = "POST"
pyramid_request.POST["next"] = expected_next_url
@@ -190,7 +190,7 @@ def test_post_validate_no_redirects(self, pyramid_request,
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data="theuser"),
)
- form_class = pretend.call_recorder(lambda d, login_service: form_obj)
+ form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
@@ -245,3 +245,51 @@ def test_post_redirects_user(self, pyramid_request, expected_next_url,
assert isinstance(result, HTTPSeeOther)
assert result.headers["Location"] == observed_next_url
+
+
+class TestRegister:
+ def test_get(self, pyramid_request):
+ form_inst = pretend.stub()
+ form = pretend.call_recorder(lambda *args, **kwargs: form_inst)
+ pyramid_request.find_service = pretend.call_recorder(
+ lambda *args, **kwargs: pretend.stub(
+ enabled=False,
+ csp_policy=pretend.stub(),
+ merge=lambda _: None,
+ )
+ )
+ result = views.register(pyramid_request, _form_class=form)
+ assert result["form"] is form_inst
+
+ def test_redirect_authenticated_user(self):
+ result = views.register(pretend.stub(authenticated_userid=1))
+ assert isinstance(result, HTTPSeeOther)
+ assert result.headers["Location"] == "/"
+
+ def test_register_redirect(self, pyramid_request):
+ pyramid_request.method = "POST"
+ pyramid_request.find_service = pretend.call_recorder(
+ lambda *args, **kwargs: pretend.stub(
+ csp_policy={},
+ merge=lambda _: {},
+ enabled=False,
+ verify_response=pretend.call_recorder(lambda _: None),
+ find_userid=pretend.call_recorder(lambda _: None),
+ find_userid_by_email=pretend.call_recorder(lambda _: None),
+ create_user=pretend.call_recorder(
+ lambda *args, **kwargs: pretend.stub(id=1),
+ ),
+ )
+ )
+ pyramid_request.route_path = pretend.call_recorder(lambda name: "/")
+ pyramid_request.POST.update({
+ "username": "username_value",
+ "password": "MyStr0ng!shP455w0rd",
+ "password_confirm": "MyStr0ng!shP455w0rd",
+ "email": "foo@bar.com",
+ "full_name": "full_name",
+ })
+
+ result = views.register(pyramid_request)
+ assert isinstance(result, HTTPSeeOther)
+ assert result.headers["Location"] == "/"
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -325,8 +325,10 @@ def __init__(self):
pretend.call(".packaging"),
pretend.call(".redirects"),
pretend.call(".routes"),
- pretend.call(".csp"),
pretend.call(".raven"),
+ pretend.call(".csp"),
+ pretend.call(".recaptcha"),
+ pretend.call(".http"),
] + [
pretend.call(x) for x in [
configurator_settings.get("warehouse.theme"),
@@ -352,6 +354,11 @@ def __init__(self):
"tm.activate_hook": config.activate_hook,
"tm.annotate_user": False,
}),
+ pretend.call({
+ "http": {
+ "verify": "/etc/ssl/certs/",
+ },
+ }),
]
add_settings_dict = configurator_obj.add_settings.calls[1].args[0]
assert add_settings_dict["tm.manager_hook"](pretend.stub()) is \
diff --git a/tests/unit/test_csp.py b/tests/unit/test_csp.py
--- a/tests/unit/test_csp.py
+++ b/tests/unit/test_csp.py
@@ -152,6 +152,23 @@ def test_devel_csp(self):
}
+class TestCSPPolicy:
+ def test_create(self):
+ policy = csp.CSPPolicy({"foo": ["bar"]})
+ assert isinstance(policy, collections.defaultdict)
+
+ def test_merge(self):
+ policy = csp.CSPPolicy({"foo": ["bar"]})
+ policy.merge({
+ "foo": ["baz"],
+ "something": ["else"],
+ })
+ assert policy == {
+ "foo": ["bar", "baz"],
+ "something": ["else"],
+ }
+
+
def test_includeme():
config = pretend.stub(
register_service_factory=pretend.call_recorder(
@@ -195,3 +212,36 @@ def test_includeme():
},
})
]
+
+
+class TestFactory:
+ def test_copy(self):
+ settings = {
+ "csp": {
+ "foo": "bar",
+ },
+ }
+ request = pretend.stub(
+ registry=pretend.stub(
+ settings=settings
+ )
+ )
+ result = csp.csp_factory(None, request)
+ assert isinstance(result, csp.CSPPolicy)
+ assert result == settings["csp"]
+
+ # ensure changes to factory result don't propagate back to the
+ # settings
+ result["baz"] = "foo"
+ assert result == {"foo": "bar", "baz": "foo"}
+ assert settings == {"csp": {"foo": "bar"}}
+
+ def test_default(self):
+ request = pretend.stub(
+ registry=pretend.stub(
+ settings={}
+ )
+ )
+ result = csp.csp_factory(None, request)
+ assert isinstance(result, csp.CSPPolicy)
+ assert result == {}
diff --git a/tests/unit/test_http.py b/tests/unit/test_http.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/test_http.py
@@ -0,0 +1,76 @@
+import threading
+import queue
+
+import pretend
+
+import warehouse.http
+
+_REQUEST = pretend.stub(
+ log=pretend.stub(
+ debug=pretend.call_recorder(lambda *args: None),
+ )
+)
+
+
+class TestSession:
+ def test_create(self):
+ config = {
+ "verify": "foo",
+ }
+
+ factory = warehouse.http.ThreadLocalSessionFactory(config)
+ session_a, session_b = factory(_REQUEST), factory(_REQUEST)
+ assert session_a is session_b
+ assert session_a.verify == session_b.verify == config["verify"]
+
+ def test_threads(self):
+ def _test_factory(fifo, start):
+ start.wait()
+ factory = warehouse.http.ThreadLocalSessionFactory()
+ # the actual session instance is stuck into the queue here as to
+ # maintain a reference so it's not gc'd (which can result in id
+ # reuse)
+ fifo.put(
+ (threading.get_ident(), factory(_REQUEST))
+ )
+
+ start = threading.Event()
+
+ fifo = queue.Queue()
+ threads = [
+ threading.Thread(target=_test_factory, args=(fifo, start))
+ for _ in range(10)
+ ]
+
+ for thread in threads:
+ thread.start()
+
+ start.set()
+
+ for thread in threads:
+ thread.join()
+
+ # data pushed into the queue is (threadid, session).
+ # this basically proves that the session object id is different per
+ # thread
+ results = [fifo.get() for _ in range(len(threads))]
+ idents, objects = zip(*results)
+ assert len(set(idents)) == len(threads)
+ assert len(set(id(obj) for obj in objects)) == len(threads)
+
+
+def test_includeme():
+ config = pretend.stub(
+ registry=pretend.stub(
+ settings={},
+ ),
+ add_request_method=pretend.call_recorder(
+ lambda *args, **kwargs: None
+ ),
+ )
+ warehouse.http.includeme(config)
+
+ assert len(config.add_request_method.calls) == 1
+ call = config.add_request_method.calls[0]
+ assert isinstance(call.args[0], warehouse.http.ThreadLocalSessionFactory)
+ assert call.kwargs == {"name": "http", "reify": True}
diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py
--- a/tests/unit/test_logging.py
+++ b/tests/unit/test_logging.py
@@ -13,6 +13,7 @@
import json
import logging
import logging.config
+import threading
import uuid
from unittest import mock
@@ -49,6 +50,7 @@ def test_non_warehouse_logger_renders(self):
"logger": "another.logger",
"level": "INFO",
"event": "the message",
+ "thread": threading.get_ident(),
}
diff --git a/tests/unit/test_recaptcha.py b/tests/unit/test_recaptcha.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/test_recaptcha.py
@@ -0,0 +1,251 @@
+import socket
+import urllib.parse
+from os import environ
+
+import pytest
+import pretend
+import requests
+import responses
+
+from warehouse import recaptcha
+
+
+_SETTINGS = {
+ "recaptcha": {
+ "site_key": "site_key_value",
+ "secret_key": "secret_key_value",
+ },
+}
+_REQUEST = pretend.stub(
+ # returning a real requests.Session object because responses is responsible
+ # for mocking that out
+ http=requests.Session(),
+ registry=pretend.stub(
+ settings=_SETTINGS,
+ ),
+)
+
+
+class TestVerifyResponse:
+ @responses.activate
+ def test_verify_service_disabled(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ body="",
+ )
+ serv = recaptcha.Service(
+ pretend.stub(registry=pretend.stub(settings={}))
+ )
+ assert serv.verify_response('') is None
+ assert not responses.calls
+
+ @responses.activate
+ def test_remote_ip_payload(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={"success": True},
+ )
+ serv = recaptcha.Service(_REQUEST)
+ serv.verify_response("meaningless", remote_ip="ip")
+
+ payload = dict(urllib.parse.parse_qsl(responses.calls[0].request.body))
+ assert payload["remoteip"] == "ip"
+
+ @responses.activate
+ def test_unexpected_data_error(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ body="something awful",
+ )
+ serv = recaptcha.Service(_REQUEST)
+
+ with pytest.raises(recaptcha.UnexpectedError) as err:
+ serv.verify_response("meaningless")
+
+ expected = "Unexpected data in response body: something awful"
+ assert str(err.value) == expected
+
+ @responses.activate
+ def test_missing_success_key_error(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={"foo": "bar"},
+ )
+ serv = recaptcha.Service(_REQUEST)
+
+ with pytest.raises(recaptcha.UnexpectedError) as err:
+ serv.verify_response("meaningless")
+
+ expected = "Missing 'success' key in response: {'foo': 'bar'}"
+ assert str(err.value) == expected
+
+ @responses.activate
+ def test_missing_error_codes_key_error(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={"success": False},
+ )
+ serv = recaptcha.Service(_REQUEST)
+
+ with pytest.raises(recaptcha.UnexpectedError) as err:
+ serv.verify_response("meaningless")
+
+ expected = "Response missing 'error-codes' key: {'success': False}"
+ assert str(err.value) == expected
+
+ @responses.activate
+ def test_error_map_error(self):
+ for key, exc_tp in recaptcha.ERROR_CODE_MAP.items():
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={
+ "success": False,
+ "challenge_ts": 0,
+ "hostname": "hotname_value",
+ "error_codes": [key]
+ }
+ )
+
+ serv = recaptcha.Service(_REQUEST)
+ with pytest.raises(exc_tp):
+ serv.verify_response("meaningless")
+
+ responses.reset()
+
+ @responses.activate
+ def test_error_map_unknown_error(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={
+ "success": False,
+ "challenge_ts": 0,
+ "hostname": "hostname_value",
+ "error_codes": ["slartibartfast"],
+ },
+ )
+
+ serv = recaptcha.Service(_REQUEST)
+ with pytest.raises(recaptcha.UnexpectedError) as err:
+ serv.verify_response("meaningless")
+ assert str(err) == "Unexpected error code: slartibartfast"
+
+ @responses.activate
+ def test_challenge_response_missing_timestamp_success(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={
+ "success": True,
+ "hostname": "hostname_value",
+ },
+ )
+
+ serv = recaptcha.Service(_REQUEST)
+ res = serv.verify_response("meaningless")
+
+ assert isinstance(res, recaptcha.ChallengeResponse)
+ assert res.challenge_ts is None
+ assert res.hostname == "hostname_value"
+
+ @responses.activate
+ def test_challenge_response_missing_hostname_success(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={
+ "success": True,
+ "challenge_ts": 0,
+ },
+ )
+
+ serv = recaptcha.Service(_REQUEST)
+ res = serv.verify_response("meaningless")
+
+ assert isinstance(res, recaptcha.ChallengeResponse)
+ assert res.hostname is None
+ assert res.challenge_ts == 0
+
+ @responses.activate
+ def test_challenge_response_success(self):
+ responses.add(
+ responses.POST,
+ recaptcha.VERIFY_URL,
+ json={
+ "success": True,
+ "hostname": "hostname_value",
+ "challenge_ts": 0,
+ },
+ )
+
+ serv = recaptcha.Service(_REQUEST)
+ res = serv.verify_response("meaningless")
+
+ assert isinstance(res, recaptcha.ChallengeResponse)
+ assert res.hostname == "hostname_value"
+ assert res.challenge_ts == 0
+
+ @responses.activate
+ def test_unexpected_error(self):
+ serv = recaptcha.Service(_REQUEST)
+ serv.request.http.post = pretend.raiser(socket.error)
+
+ with pytest.raises(recaptcha.UnexpectedError):
+ serv.verify_response("meaningless")
+
+
+class TestCSPPolicy:
+ def test_csp_policy(self):
+ scheme = 'https'
+ request = pretend.stub(
+ scheme=scheme,
+ registry=pretend.stub(settings={
+ "recaptcha": {
+ "site_key": "foo",
+ "secret_key": "bar",
+ },
+ })
+ )
+ serv = recaptcha.Service(request)
+ assert serv.csp_policy == {
+ "script-src": [
+ "%s://www.google.com/recaptcha/" % scheme,
+ "%s://www.gstatic.com/recaptcha/" % scheme,
+ ],
+ "frame-src": ["https://www.google.com/recaptcha/"],
+ "style-src": ["'unsafe-inline'"],
+ }
+
+
+def test_service_factory():
+ serv = recaptcha.service_factory(None, _REQUEST)
+ assert serv.request is _REQUEST
+
+
+def test_includeme():
+ config = pretend.stub(
+ register_service_factory=pretend.call_recorder(
+ lambda fact, name: None
+ ),
+ add_settings=pretend.call_recorder(lambda settings: None),
+ )
+ recaptcha.includeme(config)
+
+ assert config.register_service_factory.calls == [
+ pretend.call(recaptcha.service_factory, name="recaptcha"),
+ ]
+
+ assert config.add_settings.calls == [
+ pretend.call({
+ "recaptcha": {
+ "site_key": environ.get("RECAPTCHA_SITE_KEY"),
+ "secret_key": environ.get("RECAPTCHA_SECRET_KEY"),
+ },
+ }),
+ ]
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -75,6 +75,7 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, read_only=False):
),
pretend.call("accounts.login", "/account/login/"),
pretend.call("accounts.logout", "/account/logout/"),
+ pretend.call("accounts.register", "/account/register/"),
pretend.call(
"packaging.project",
"/project/{name}/",
| Add user registration functionality
New items:
- Recaptcha service (conditionally functional, dependent on env vars)
- forms.RegistrationForm
- Reusable recaptcha template
Also added the responses lib as a dependency for mocking recaptcha tests
Features:
- [x] reCaptcha service
- [x] Reusable reCaptcha template
- [x] `forms.RegistrationForm`
- [x] `views.register`
Review items:
- [x] Address SSL verification issue
- [x] Expose reCaptcha keys through make targets and docker-compose
- [x] Move from base `requests.post` to a thread local session implementation (see the sketch after this list)
- [x] Handle timeouts & other lower level socket errors in recaptcha.py
- [x] Security review on session handling in registration handler
- [x] Bottom load recaptcha scripts
- [x] Create a threadlocal requests session
- [x] Configure sane timeouts for `requests.Session` instance
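A rough sketch of the thread-local session pattern mentioned above — purely illustrative, not the code that shipped; the `defaults` mapping and the usage comment are assumptions about how such a factory would typically be configured:
```
import threading

import requests


class ThreadLocalSessions:
    """Hand out one configured requests.Session per thread."""

    def __init__(self, defaults=None):
        # defaults maps Session attribute names to values,
        # e.g. {"verify": "/etc/ssl/certs/"}
        self._defaults = defaults or {}
        self._local = threading.local()

    def get(self):
        session = getattr(self._local, "session", None)
        if session is None:
            session = requests.Session()
            for attr, value in self._defaults.items():
                setattr(session, attr, value)
            self._local.session = session
        return session


sessions = ThreadLocalSessions({"verify": "/etc/ssl/certs/"})
# Per request/task, reuse the thread's session and always pass an explicit timeout:
#     sessions.get().post(url, data=payload, timeout=10)
```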
| 2016-03-17T19:16:19Z | [] | [] |
|
pypi/warehouse | 1,077 | pypi__warehouse-1077 | [
"1076"
] | fea0e4bdf1633cb2d9dd4f0f863b551b3e403d66 | diff --git a/warehouse/utils/paginate.py b/warehouse/utils/paginate.py
--- a/warehouse/utils/paginate.py
+++ b/warehouse/utils/paginate.py
@@ -15,12 +15,30 @@
class _ElasticsearchWrapper:
+ max_results = 10000
+
def __init__(self, query):
self.query = query
self.results = None
self.best_guess = None
def __getitem__(self, range):
+ # If we're asking for a range that extends past our maximum results,
+ # then we need to clamp the start of our slice to our maximum results
+ # size, and make sure that the end of our slice >= to that to ensure a
+ # consistent slice.
+ if range.start > self.max_results:
+ range = slice(
+ self.max_results,
+ max(range.stop, self.max_results),
+ range.step,
+ )
+
+ # If we're being asked for a range that extends past our maximum result
+ # then we'll clamp it to the maximum result size and stop there.
+ if range.stop > self.max_results:
+ range = slice(range.start, self.max_results, range.step)
+
if self.results is not None:
raise RuntimeError("Cannot reslice after having already sliced.")
self.results = self.query[range].execute()
@@ -35,7 +53,7 @@ def __getitem__(self, range):
def __len__(self):
if self.results is None:
raise RuntimeError("Cannot get length until a slice.")
- return self.results.hits.total
+ return min(self.results.hits.total, self.max_results)
def ElasticsearchPage(*args, **kwargs): # noqa
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -11,7 +11,7 @@
# limitations under the License.
from pyramid.httpexceptions import (
- HTTPException, HTTPSeeOther, HTTPMovedPermanently,
+ HTTPException, HTTPSeeOther, HTTPMovedPermanently, HTTPNotFound,
)
from pyramid.view import (
notfound_view_config, forbidden_view_config, view_config,
@@ -173,12 +173,16 @@ def search(request):
if request.params.get("o"):
query = query.sort(request.params["o"])
+ page_num = int(request.params.get("page", 1))
page = ElasticsearchPage(
query,
- page=int(request.params.get("page", 1)),
+ page=page_num,
url_maker=paginate_url_factory(request),
)
+ if page_num > page.page_count:
+ raise HTTPNotFound
+
return {
"page": page,
"term": request.params.get("q"),
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -15,6 +15,8 @@
import pretend
import pytest
+from pyramid.httpexceptions import HTTPNotFound
+
from warehouse import views
from warehouse.views import (
forbidden, index, httpexception_view, robotstxt, current_user_indicator,
@@ -111,7 +113,7 @@ def test_with_a_query(self, monkeypatch, page):
params=params,
)
- page_obj = pretend.stub()
+ page_obj = pretend.stub(page_count=(page or 1) + 10)
page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
@@ -167,7 +169,7 @@ def test_with_an_ordering(self, monkeypatch, page):
params=params,
)
- page_obj = pretend.stub()
+ page_obj = pretend.stub(page_count=(page or 1) + 10)
page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
@@ -218,7 +220,7 @@ def test_without_a_query(self, monkeypatch, page):
params=params,
)
- page_obj = pretend.stub()
+ page_obj = pretend.stub(page_count=(page or 1) + 10)
page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
@@ -235,3 +237,27 @@ def test_without_a_query(self, monkeypatch, page):
pretend.call(query, url_maker=url_maker, page=page or 1),
]
assert url_maker_factory.calls == [pretend.call(request)]
+
+ def test_raises_404_with_pagenum_too_high(self, monkeypatch):
+ params = {"page": 15}
+ query = pretend.stub()
+ request = pretend.stub(
+ es=pretend.stub(query=lambda: query),
+ params=params,
+ )
+
+ page_obj = pretend.stub(page_count=10)
+ page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
+ monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
+
+ url_maker = pretend.stub()
+ url_maker_factory = pretend.call_recorder(lambda request: url_maker)
+ monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)
+
+ with pytest.raises(HTTPNotFound):
+ search(request)
+
+ assert page_cls.calls == [
+ pretend.call(query, url_maker=url_maker, page=15 or 1),
+ ]
+ assert url_maker_factory.calls == [pretend.call(request)]
diff --git a/tests/unit/utils/test_paginate.py b/tests/unit/utils/test_paginate.py
--- a/tests/unit/utils/test_paginate.py
+++ b/tests/unit/utils/test_paginate.py
@@ -94,6 +94,18 @@ def test_slices_and_length(self):
assert wrapper[1:3] == [2, 3]
assert len(wrapper) == 6
+ def test_slice_start_clamps_to_max(self):
+ wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6]))
+ wrapper.max_results = 5
+ assert wrapper[6:10] == []
+ assert len(wrapper) == 5
+
+ def test_slice_end_clamps_to_max(self):
+ wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6]))
+ wrapper.max_results = 5
+ assert wrapper[1:10] == [2, 3, 4, 5]
+ assert len(wrapper) == 5
+
def test_second_slice_fails(self):
wrapper = paginate._ElasticsearchWrapper(FakeQuery([1, 2, 3, 4, 5, 6]))
wrapper[1:3]
| TransportError: TransportError(500, 'search_phase_execution_exception', 'Result window is too lar...
https://app.getsentry.com/python-software-foundation/warehouse-production/issues/118467761/
```
TransportError: TransportError(500, 'search_phase_execution_exception', 'Result window is too large, from + size must be less than or equal to: [10000] but was [10640]. See the scroll api for a more efficient way to request large data sets. This limit can be set by changing the [index.max_result_window] index level parameter.')
(18 additional frame(s) were not displayed)
...
File "warehouse/utils/http.py", line 31, in wrapped
return fn(context, request)
File "warehouse/cache/origin/__init__.py", line 81, in wrapped
return view(context, request)
File "warehouse/views.py", line 179, in search
url_maker=paginate_url_factory(request),
File "warehouse/utils/paginate.py", line 43, in ElasticsearchPage
return Page(*args, **kwargs)
File "warehouse/utils/paginate.py", line 26, in __getitem__
self.results = self.query[range].execute()
```
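The 10000 here is Elasticsearch's default `index.max_result_window`: any query whose `from + size` goes past it is rejected. One way to guard a paginated search is to clamp the requested slice before executing it — a minimal, illustrative sketch of that idea, not the exact code that landed:
```
MAX_RESULTS = 10000  # Elasticsearch's default index.max_result_window


def clamp(requested, max_results=MAX_RESULTS):
    """Clamp a result slice so that from + size never exceeds the window."""
    start = min(requested.start or 0, max_results)
    stop = min(max(requested.stop or 0, start), max_results)
    return slice(start, stop, requested.step)


assert clamp(slice(0, 20)) == slice(0, 20, None)
# A page far past the window collapses to an empty slice at the boundary:
assert clamp(slice(10620, 10640)) == slice(10000, 10000, None)
```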
| 2016-04-17T15:29:09Z | [] | [] |
|
pypi/warehouse | 1,164 | pypi__warehouse-1164 | [
"1163",
"1163"
] | f1f817698456e05cfe5e275558cc75988f3f725f | diff --git a/warehouse/utils/paginate.py b/warehouse/utils/paginate.py
--- a/warehouse/utils/paginate.py
+++ b/warehouse/utils/paginate.py
@@ -44,9 +44,10 @@ def __getitem__(self, range):
self.results = self.query[range].execute()
if hasattr(self.results, "suggest"):
- suggestion = self.results.suggest.name_suggestion[0]
- if suggestion.options:
- self.best_guess = suggestion.options[0]
+ if self.results.suggest.name_suggestion:
+ suggestion = self.results.suggest.name_suggestion[0]
+ if suggestion.options:
+ self.best_guess = suggestion.options[0]
return list(self.results)
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -182,7 +182,7 @@ def search(request):
url_maker=paginate_url_factory(request),
)
- if page_num > page.page_count:
+ if page.page_count and page_num > page.page_count:
raise HTTPNotFound
available_filters = collections.defaultdict(list)
| diff --git a/tests/unit/utils/test_paginate.py b/tests/unit/utils/test_paginate.py
--- a/tests/unit/utils/test_paginate.py
+++ b/tests/unit/utils/test_paginate.py
@@ -47,14 +47,17 @@ def __iter__(self):
class FakeSuggestResult(FakeResult):
- def __init__(self, data, total, options):
+ def __init__(self, data, total, options=None, suggestion=None):
super().__init__(data, total)
self.options = options
+ self.suggestion = suggestion
@property
def suggest(self):
- suggestion = FakeSuggestion(options=self.options)
- return FakeSuggest(name_suggestion=[suggestion])
+ if self.suggestion is None:
+ suggestion = FakeSuggestion(options=self.options)
+ return FakeSuggest(name_suggestion=[suggestion])
+ return FakeSuggest(name_suggestion=self.suggestion)
class FakeQuery:
@@ -77,14 +80,15 @@ def execute(self):
class FakeSuggestQuery(FakeQuery):
- def __init__(self, fake, options):
+ def __init__(self, fake, options=None, suggestion=None):
super().__init__(fake)
self.options = options
+ self.suggestion = suggestion
def execute(self):
data = self.fake[self.range]
total = len(self.fake)
- return FakeSuggestResult(data, total, self.options)
+ return FakeSuggestResult(data, total, self.options, self.suggestion)
class TestElasticsearchWrapper:
@@ -127,6 +131,13 @@ def test_best_guess_suggestion(self):
assert wrapper.best_guess == fake_option
+ def test_best_guess_suggestion_no_suggestions(self):
+ query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], suggestion=[])
+ wrapper = paginate._ElasticsearchWrapper(query)
+ wrapper[1:3]
+
+ assert wrapper.best_guess is None
+
def test_best_guess_suggestion_no_options(self):
query = FakeSuggestQuery([1, 2, 3, 4, 5, 6], options=[])
wrapper = paginate._ElasticsearchWrapper(query)
| error 404 when searching for a package name with underscores
when I try to search for a package name which contains underscores I get a 404 error
example: https://pypi.io/search/?q=test_test
| 2016-05-04T17:32:01Z | [] | [] |
|
pypi/warehouse | 1,178 | pypi__warehouse-1178 | [
"1029"
] | 45e6d546849c59dd7fcf96a2277fb10779b89641 | diff --git a/warehouse/packaging/search.py b/warehouse/packaging/search.py
--- a/warehouse/packaging/search.py
+++ b/warehouse/packaging/search.py
@@ -21,12 +21,18 @@
filter=["standard", "lowercase", "stop", "snowball"],
)
+NameAnalyzer = analyzer(
+ "normalized_name",
+ tokenizer="lowercase",
+ filter=["standard", "lowercase", "word_delimiter"],
+)
+
@doc_type
class Project(DocType):
name = String()
- normalized_name = String(index="not_analyzed")
+ normalized_name = String(analyzer=NameAnalyzer, index_options="docs")
version = String(index="not_analyzed", multi=True)
summary = String(analyzer="snowball")
description = String(analyzer="snowball")
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -157,9 +157,10 @@ def search(request):
"multi_match",
query=request.params["q"],
fields=[
- "name^2", "version", "author", "author_email", "maintainer",
- "maintainer_email", "home_page", "license", "summary",
- "description", "keywords", "platform", "download_url",
+ "author", "author_email", "description^5", "download_url",
+ "home_page", "keywords^5", "license", "maintainer",
+ "maintainer_email", "normalized_name^10", "platform",
+ "summary^5",
],
).suggest(
name="name_suggestion",
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -141,10 +141,10 @@ def test_with_a_query(self, monkeypatch, db_request, page):
"multi_match",
query="foo bar",
fields=[
- "name^2", "version", "author", "author_email",
- "maintainer", "maintainer_email", "home_page", "license",
- "summary", "description", "keywords", "platform",
- "download_url",
+ "author", "author_email", "description^5", "download_url",
+ "home_page", "keywords^5", "license", "maintainer",
+ "maintainer_email", "normalized_name^10", "platform",
+ "summary^5",
],
),
]
@@ -198,10 +198,10 @@ def test_with_an_ordering(self, monkeypatch, db_request, page):
"multi_match",
query="foo bar",
fields=[
- "name^2", "version", "author", "author_email",
- "maintainer", "maintainer_email", "home_page", "license",
- "summary", "description", "keywords", "platform",
- "download_url",
+ "author", "author_email", "description^5", "download_url",
+ "home_page", "keywords^5", "license", "maintainer",
+ "maintainer_email", "normalized_name^10", "platform",
+ "summary^5",
],
),
]
@@ -270,10 +270,10 @@ def test_with_classifiers(self, monkeypatch, db_request, page):
"multi_match",
query="foo bar",
fields=[
- "name^2", "version", "author", "author_email",
- "maintainer", "maintainer_email", "home_page", "license",
- "summary", "description", "keywords", "platform",
- "download_url",
+ "author", "author_email", "description^5", "download_url",
+ "home_page", "keywords^5", "license", "maintainer",
+ "maintainer_email", "normalized_name^10", "platform",
+ "summary^5",
],
),
]
| Search relevancy is still not ideal
#1020 fixed #1019 for the majority of packages; however, a few still produce odd results:
For example:
https://warehouse.python.org/search/?q=flask (`Flask` package is 2nd, `Flask-Admin` is first)
https://warehouse.python.org/search/?q=django (`Django` package is 11th, `dotulu` is first)
https://warehouse.python.org/search/?q=git (First 3 packages do not have "git" anywhere in them)
This is hard to test in dev because the dev DB is a snapshot of TestPyPI, and those packages are missing.
@dstufft, would it be possible to get a more complete DB for local development?
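For context on the fix in the patch above: elasticsearch-dsl's `multi_match` accepts per-field boosts via the `field^N` suffix, which multiplies that field's contribution to the relevance score. A self-contained sketch — the index and field names here are illustrative:
```
from elasticsearch_dsl import Search

# Weight name matches far above the free-text fields.
query = Search(index="projects").query(
    "multi_match",
    query="flask",
    fields=["normalized_name^10", "summary^5", "keywords^5", "description"],
)
print(query.to_dict())  # the multi_match clause that would be sent to Elasticsearch
```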
| Yea, I'll see if I can't just dump the regular PyPI database and sanitize it.
Sorry for the delay. I have a snapshot of the real PyPI I can send you, but I'd rather not add it to the repository because even compressed it's quite large (~270MB), and although I sanitized it I'd rather not risk having missed something. Is there an email address or something I can send you a link via?
Assuming you're still interested in this anyways!
Sure! You can use the address on my github profile.
| 2016-05-06T20:56:05Z | [] | [] |
pypi/warehouse | 1,181 | pypi__warehouse-1181 | [
"1112"
] | d57186c3b5f783e2235b7d5be610302adce23c6f | diff --git a/warehouse/celery.py b/warehouse/celery.py
--- a/warehouse/celery.py
+++ b/warehouse/celery.py
@@ -21,13 +21,16 @@
from celery.signals import celeryd_init
from pyramid import scripting
from pyramid.threadlocal import get_current_request
+from raven.contrib.celery import register_signal, register_logger_signal
from warehouse.config import Environment, configure
@celeryd_init.connect
def _configure_celery(*args, **kwargs):
- configure()
+ config = configure()
+ register_logger_signal(config.registry["raven.client"])
+ register_signal(config.registry["raven.client"])
class TLSRedisBackend(_RedisBackend):
| diff --git a/tests/unit/test_celery.py b/tests/unit/test_celery.py
--- a/tests/unit/test_celery.py
+++ b/tests/unit/test_celery.py
@@ -21,12 +21,27 @@
def test_configure_celery(monkeypatch):
- configure = pretend.call_recorder(lambda: None)
+ client = pretend.stub()
+ getitem = pretend.call_recorder(lambda *a: client)
+ registry = pretend.stub(__getitem__=getitem)
+ configure = pretend.call_recorder(lambda: pretend.stub(registry=registry))
+ register_logger_signal = pretend.call_recorder(lambda x: None)
+ register_signal = pretend.call_recorder(lambda x: None)
monkeypatch.setattr(celery, "configure", configure)
+ monkeypatch.setattr(
+ celery, "register_logger_signal", register_logger_signal
+ )
+ monkeypatch.setattr(celery, "register_signal", register_signal)
celery._configure_celery()
assert configure.calls == [pretend.call()]
+ assert getitem.calls == [
+ pretend.call('raven.client'),
+ pretend.call('raven.client'),
+ ]
+ assert register_logger_signal.calls == [pretend.call(client)]
+ assert register_signal.calls == [pretend.call(client)]
def test_tls_redis_backend():
| Errors in celery don't get sent to Sentry
| 2016-05-09T20:58:27Z | [] | [] |
|
pypi/warehouse | 1,210 | pypi__warehouse-1210 | [
"1194"
] | bd5b14b54b0f1402273b9e8882d97df8742b0b66 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -123,6 +123,7 @@ def configure(settings=None):
maybe_set(settings, "warehouse.theme", "WAREHOUSE_THEME")
maybe_set(settings, "warehouse.domain", "WAREHOUSE_DOMAIN")
maybe_set(settings, "forklift.domain", "FORKLIFT_DOMAIN")
+ maybe_set(settings, "warehouse.legacy_domain", "WAREHOUSE_LEGACY_DOMAIN")
maybe_set(settings, "site.name", "SITE_NAME", default="Warehouse")
maybe_set(settings, "aws.key_id", "AWS_ACCESS_KEY_ID")
maybe_set(settings, "aws.secret_key", "AWS_SECRET_ACCESS_KEY")
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -21,6 +21,7 @@
import packaging.requirements
import packaging.version
import pkg_resources
+import requests
import wtforms
import wtforms.validators
from rfc3986 import uri_reference
@@ -944,6 +945,17 @@ def file_upload(request):
},
)
+ # TODO: Once we no longer have the legacy code base running PyPI we can
+ # go ahead and delete this tiny bit of shim code, since it only
+ # exists to purge stuff on legacy PyPI when uploaded to Warehouse
+ old_domain = request.registry.settings.get("warehouse.legacy_domain")
+ if old_domain:
+ request.tm.get().addAfterCommitHook(
+ requests.post,
+ args=["https://{}/pypi".format(old_domain)],
+ kws={"data": {":action": "purge", "project": project.name}},
+ )
+
return Response()
| diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py
--- a/tests/unit/forklift/test_legacy.py
+++ b/tests/unit/forklift/test_legacy.py
@@ -21,6 +21,7 @@
import pkg_resources
import pretend
import pytest
+import requests
from pyblake2 import blake2b
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden
@@ -1652,6 +1653,50 @@ def test_upload_succeeds_creates_project(self, pyramid_config, db_request):
),
]
+ def test_upload_purges_legacy(self, pyramid_config, db_request,
+ monkeypatch):
+ pyramid_config.testing_securitypolicy(userid=1)
+
+ user = UserFactory.create()
+
+ filename = "{}-{}.tar.gz".format("example", "1.0")
+
+ db_request.user = user
+ db_request.POST = MultiDict({
+ "metadata_version": "1.2",
+ "name": "example",
+ "version": "1.0",
+ "filetype": "sdist",
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "content": pretend.stub(
+ filename=filename,
+ file=io.BytesIO(b"A fake file."),
+ type="application/tar",
+ ),
+ })
+
+ storage_service = pretend.stub(store=lambda path, filepath, meta: None)
+ db_request.find_service = lambda svc: storage_service
+ db_request.client_addr = "10.10.10.10"
+
+ tm = pretend.stub(
+ addAfterCommitHook=pretend.call_recorder(lambda *a, **kw: None),
+ )
+ db_request.tm = pretend.stub(get=lambda: tm)
+
+ db_request.registry.settings["warehouse.legacy_domain"] = "example.com"
+
+ resp = legacy.file_upload(db_request)
+
+ assert resp.status_code == 200
+ assert tm.addAfterCommitHook.calls == [
+ pretend.call(
+ requests.post,
+ args=["https://example.com/pypi"],
+ kws={"data": {":action": "purge", "project": "example"}},
+ ),
+ ]
+
def test_fails_without_user(self, pyramid_config, pyramid_request):
pyramid_config.testing_securitypolicy(userid=None)
| Temporarily Purge Legacy PyPI
Uploading should temporarily purge legacy PyPI; currently, if someone uploads to Warehouse they need to do something like:
```
$ curl -XPURGE https://pypi.python.org/simple/<name>/
$ curl -XPURGE https://pypi.python.org/pypi/<name>/json
```
To make sure that their upload shows up on PyPI without it having to fall out of the cache. Or they could go and hit save on PyPI legacy at least once to trigger the same thing. This is less than optimal and we should just temporarily do the purges for folks until legacy is retired.
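For reference, a minimal sketch of what the shim in the patch ends up doing once the upload transaction commits; the hard-coded domain is only illustrative (Warehouse reads it from the `warehouse.legacy_domain` setting):
```
import requests


def purge_legacy(project_name, legacy_domain="pypi.python.org"):
    # POST the project name to legacy PyPI's purge action so it invalidates
    # its own cached pages, instead of asking uploaders to curl -XPURGE each
    # URL by hand.
    requests.post(
        "https://{}/pypi".format(legacy_domain),
        data={":action": "purge", "project": project_name},
    )
```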
| 2016-06-02T14:06:09Z | [] | [] |
|
pypi/warehouse | 1,237 | pypi__warehouse-1237 | [
"1222"
] | 8911bae4e453f1b3ec63766c86ea5fe185f15dcc | diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -674,6 +674,7 @@ def file_upload(request):
c for c in all_classifiers
if c.classifier in form.classifiers.data
],
+ _pypi_hidden=False,
dependencies=list(_construct_dependencies(
form,
{
@@ -727,6 +728,12 @@ def file_upload(request):
releases, key=lambda x: packaging.version.parse(x.version))):
r._pypi_ordering = i
+ # TODO: Again, we should figure out a better solution to doing this than
+ # just inlining this inside this method.
+ if project.autohide:
+ for r in releases:
+ r._pypi_hidden = bool(not r == release)
+
# Pull the filename out of our POST data.
filename = request.POST["content"].filename
| diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py
--- a/tests/unit/forklift/test_legacy.py
+++ b/tests/unit/forklift/test_legacy.py
@@ -1710,6 +1710,160 @@ def test_fails_without_user(self, pyramid_config, pyramid_request):
"403 Invalid or non-existent authentication information."
)
+ def test_autohides_old_releases(self, pyramid_config, db_request):
+ pyramid_config.testing_securitypolicy(userid=1)
+
+ user = UserFactory.create()
+ project = ProjectFactory.create(autohide=True)
+ ReleaseFactory.create(
+ project=project,
+ version="0.5",
+ _pypi_hidden=False,
+ )
+ RoleFactory.create(user=user, project=project)
+
+ db_request.db.add(
+ Classifier(classifier="Environment :: Other Environment"),
+ )
+ db_request.db.add(
+ Classifier(classifier="Programming Language :: Python"),
+ )
+
+ filename = "{}-{}.tar.gz".format(project.name, "1.0")
+
+ db_request.user = user
+ db_request.client_addr = "10.10.10.20"
+ db_request.POST = MultiDict({
+ "metadata_version": "1.2",
+ "name": project.name,
+ "version": "1.0",
+ "summary": "This is my summary!",
+ "filetype": "sdist",
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "content": pretend.stub(
+ filename=filename,
+ file=io.BytesIO(b"A fake file."),
+ type="application/tar",
+ ),
+ })
+ db_request.POST.extend([
+ ("classifiers", "Environment :: Other Environment"),
+ ("classifiers", "Programming Language :: Python"),
+ ("requires_dist", "foo"),
+ ("requires_dist", "bar (>1.0)"),
+ ("project_urls", "Test, https://example.com/"),
+ ("requires_external", "Cheese (>1.0)"),
+ ("provides", "testing"),
+ ])
+
+ storage_service = pretend.stub(store=lambda path, filepath, meta: None)
+ db_request.find_service = lambda svc: storage_service
+
+ resp = legacy.file_upload(db_request)
+
+ assert resp.status_code == 200
+
+ # Ensure that a Release object has been created and is not hidden.
+ release = (
+ db_request.db.query(Release)
+ .filter((Release.project == project) &
+ (Release.version == "1.0"))
+ .one()
+ )
+ assert not release._pypi_hidden
+
+ # Ensure that all the old release objects are hidden.
+ other_releases = (
+ db_request.db.query(Release)
+ .filter((Release.project == project) &
+ (Release.version != "1.0"))
+ .all()
+ )
+ assert len(other_releases)
+ for r in other_releases:
+ assert r._pypi_hidden
+
+ def test_doesnt_autohides_old_releases(self, pyramid_config, db_request):
+ pyramid_config.testing_securitypolicy(userid=1)
+
+ user = UserFactory.create()
+ project = ProjectFactory.create(autohide=False)
+ previous_releases = {
+ "0.5": ReleaseFactory.create(
+ project=project,
+ version="0.5",
+ _pypi_hidden=False,
+ ),
+ "0.75": ReleaseFactory.create(
+ project=project,
+ version="0.75",
+ _pypi_hidden=False,
+ ),
+ }
+ RoleFactory.create(user=user, project=project)
+
+ db_request.db.add(
+ Classifier(classifier="Environment :: Other Environment"),
+ )
+ db_request.db.add(
+ Classifier(classifier="Programming Language :: Python"),
+ )
+
+ filename = "{}-{}.tar.gz".format(project.name, "1.0")
+
+ db_request.user = user
+ db_request.client_addr = "10.10.10.20"
+ db_request.POST = MultiDict({
+ "metadata_version": "1.2",
+ "name": project.name,
+ "version": "1.0",
+ "summary": "This is my summary!",
+ "filetype": "sdist",
+ "md5_digest": "335c476dc930b959dda9ec82bd65ef19",
+ "content": pretend.stub(
+ filename=filename,
+ file=io.BytesIO(b"A fake file."),
+ type="application/tar",
+ ),
+ })
+ db_request.POST.extend([
+ ("classifiers", "Environment :: Other Environment"),
+ ("classifiers", "Programming Language :: Python"),
+ ("requires_dist", "foo"),
+ ("requires_dist", "bar (>1.0)"),
+ ("project_urls", "Test, https://example.com/"),
+ ("requires_external", "Cheese (>1.0)"),
+ ("provides", "testing"),
+ ])
+
+ storage_service = pretend.stub(store=lambda path, filepath, meta: None)
+ db_request.find_service = lambda svc: storage_service
+
+ resp = legacy.file_upload(db_request)
+
+ assert resp.status_code == 200
+
+ # Ensure that a Release object has been created and is not hidden.
+ release = (
+ db_request.db.query(Release)
+ .filter((Release.project == project) &
+ (Release.version == "1.0"))
+ .one()
+ )
+ assert not release._pypi_hidden
+
+ # Ensure that all the old release objects still have the same hidden
+ # state.
+ other_releases = (
+ db_request.db.query(Release)
+ .filter((Release.project == project) &
+ (Release.version != "1.0"))
+ .all()
+ )
+ assert len(other_releases)
+ for r in other_releases:
+ assert r._pypi_hidden == previous_releases[r.version]._pypi_hidden
+
@pytest.mark.parametrize("status", [True, False])
def test_legacy_purge(monkeypatch, status):
| Uploads to pypi.io don't trigger auto-hide old releases functionality in pypi.python.org
It appears that [uploads to pypi.io are only syncing json metadata](https://github.com/pypa/setuptools/issues/589#issuecomment-223615124) and not updating the latest package [at the canonical location](https://pypi.python.org/pypi/setuptools).
I've tried [managing the project](https://pypi.python.org/pypi?%3Aaction=pkg_edit&name=setuptools) to see what releases might be present, but that URL returns 500 after 30 seconds (probably a timeout), so I'm unable to assess exactly why pypi.python.org isn't showing the latest release.
| So the management interface did finally load, and as I suspected, the new releases are present in pypi.python.org; it's just that the older versions are not hidden:
![screen shot 2016-06-03 at 13 14 27](https://cloud.githubusercontent.com/assets/308610/15786937/32594e5a-298d-11e6-993c-9b7d0c11d40c.png)
Ah yes, Warehouse doesn't use the hidden flag, so it didn't occur to me to update that.
| 2016-06-05T02:55:10Z | [] | [] |
pypi/warehouse | 1,242 | pypi__warehouse-1242 | [
"1236"
] | 92e1b93535776875044bdfba5b5cf5d007974cb6 | diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -14,6 +14,7 @@
from pyramid.httpexceptions import (
HTTPException, HTTPSeeOther, HTTPMovedPermanently, HTTPNotFound,
+ HTTPBadRequest,
)
from pyramid.view import (
notfound_view_config, forbidden_view_config, view_config,
@@ -192,7 +193,11 @@ def search(request):
if request.params.getall("c"):
query = query.filter("terms", classifiers=request.params.getall("c"))
- page_num = int(request.params.get("page", 1))
+ try:
+ page_num = int(request.params.get("page", 1))
+ except ValueError:
+ raise HTTPBadRequest("'page' must be an integer.")
+
page = ElasticsearchPage(
query,
page=page_num,
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -17,7 +17,9 @@
from elasticsearch_dsl import Q
from webob.multidict import MultiDict
-from pyramid.httpexceptions import HTTPNotFound
+from pyramid.httpexceptions import (
+ HTTPNotFound, HTTPBadRequest,
+)
from warehouse import views
from warehouse.views import (
@@ -386,6 +388,26 @@ def test_raises_404_with_pagenum_too_high(self, monkeypatch, db_request):
]
assert url_maker_factory.calls == [pretend.call(db_request)]
+ def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request):
+ params = MultiDict({"page": "abc"})
+ db_request.params = params
+
+ es_query = pretend.stub()
+ db_request.es = pretend.stub(query=lambda *a, **kw: es_query)
+
+ page_obj = pretend.stub(page_count=10)
+ page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
+ monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
+
+ url_maker = pretend.stub()
+ url_maker_factory = pretend.call_recorder(lambda request: url_maker)
+ monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)
+
+ with pytest.raises(HTTPBadRequest):
+ search(db_request)
+
+ assert page_cls.calls == []
+
def test_health():
request = pretend.stub(
| non-integer page params should result in a 4xx not a 500
https://github.com/pypa/warehouse/blob/master/warehouse/views.py#L195
Right now if you pass anything that's not an int, you get a 500.
| Just for information, here's the traceback generated:
```
ValueError: invalid literal for int() with base 10: '25\'"'
File "warehouse/raven.py", line 41, in raven_tween
return handler(request)
File "pyramid/tweens.py", line 62, in excview_tween
reraise(*attrs['exc_info'])
File "pyramid/compat.py", line 148, in reraise
raise value
File "pyramid/tweens.py", line 22, in excview_tween
response = handler(request)
File "pyramid_tm/__init__.py", line 101, in tm_tween
reraise(*exc_info)
File "pyramid_tm/compat.py", line 15, in reraise
raise value
File "pyramid_tm/__init__.py", line 83, in tm_tween
response = handler(request)
File "warehouse/cache/http.py", line 69, in conditional_http_tween
response = handler(request)
File "pyramid/router.py", line 158, in handle_request
view_name
File "pyramid/view.py", line 547, in _call_view
response = view_callable(context, request)
File "warehouse/sessions.py", line 291, in wrapped
return view(context, request)
File "warehouse/csrf.py", line 38, in wrapped
return view(context, request)
File "pyramid/viewderivers.py", line 512, in csrf_view
return view(context, request)
File "warehouse/cache/origin/__init__.py", line 81, in wrapped
return view(context, request)
File "pyramid/viewderivers.py", line 442, in rendered_view
result = view(context, request)
File "pyramid/viewderivers.py", line 147, in _requestonly_view
response = view(request)
File "warehouse/views.py", line 195, in search
page_num = int(request.params.get("page", 1))
```
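The fix in the patch boils down to a defensive parse of the user-supplied `page` parameter; a standalone sketch:
```
from pyramid.httpexceptions import HTTPBadRequest


def get_page_number(params):
    # params is the request's query-string multidict; anything that is not an
    # integer (e.g. the '25\'"' probe in the traceback) becomes a 400 instead
    # of an unhandled ValueError turning into a 500.
    try:
        return int(params.get("page", 1))
    except ValueError:
        raise HTTPBadRequest("'page' must be an integer.")
```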
| 2016-06-07T00:11:59Z | [] | [] |
pypi/warehouse | 1,246 | pypi__warehouse-1246 | [
"71",
"71"
] | ddec0f66ef10e37f41190b55d27e47c0805b7784 | diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -12,7 +12,10 @@
import enum
-from collections import OrderedDict
+from collections import (
+ OrderedDict,
+ defaultdict,
+)
from citext import CIText
from pyramid.security import Allow
@@ -265,6 +268,15 @@ def __table_args__(cls): # noqa
)
classifiers = association_proxy("_classifiers", "classifier")
+ @property
+ def structured_classifiers(self):
+ structured = defaultdict(list)
+ for classifier in self.classifiers:
+ key, *value = classifier.split(' :: ', 1)
+ if value:
+ structured[key].append(value[0])
+ return structured
+
files = orm.relationship(
"File",
backref="release",
| diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py
--- a/tests/unit/packaging/test_models.py
+++ b/tests/unit/packaging/test_models.py
@@ -25,6 +25,9 @@
ProjectFactory as DBProjectFactory, ReleaseFactory as DBReleaseFactory,
FileFactory as DBFileFactory, RoleFactory as DBRoleFactory,
)
+from ...common.db.classifiers import (
+ ClassifierFactory as DBClassifierFactory,
+)
class TestProjectFactory:
@@ -216,6 +219,26 @@ def test_urls(self, db_session, home_page, download_url, project_urls,
# TODO: It'd be nice to test for the actual ordering here.
assert dict(release.urls) == dict(expected)
+ def test_structured_classifiers(self, db_session):
+ release = DBReleaseFactory.create()
+ DBClassifierFactory.create(
+ classifier='Foo :: Bar :: Baz',
+ project_releases=[release],
+ )
+ DBClassifierFactory.create(
+ classifier='Foo :: Bar :: Qux',
+ project_releases=[release],
+ )
+ DBClassifierFactory.create(
+ classifier='Vleep',
+ project_releases=[release],
+ )
+ expected = {
+ 'Foo': ['Bar :: Baz', 'Bar :: Qux'],
+ }
+
+ assert release.structured_classifiers == expected
+
class TestFile:
| Link classifiers in the project description
The classifiers need to be linked to some sort of "browse" page.
| Actually, we're not currently linking to the classifiers at all... Maybe this is something we need to look at. Perhaps on the 'similar packages' tab??
We can now link to a search for a single classifier like this: https://warehouse.python.org/search/?c=Development+Status+%3A%3A+1+-+Planning
Where would we be linking _from_? An additional tab on the project description page?
This issue was opened before the new design. The new design doesn't list the classifiers anywhere, but it probably should. See #1061.
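A standalone sketch of the grouping the patch adds as `Release.structured_classifiers`, which gives a template something to render a per-category, linkable classifier list from (each full classifier can point at a `?c=` search as noted above):
```
from collections import defaultdict


def structured_classifiers(classifiers):
    # Split on the first " :: "; single-level classifiers are dropped and the
    # rest are grouped under their top-level category.
    structured = defaultdict(list)
    for classifier in classifiers:
        key, *rest = classifier.split(" :: ", 1)
        if rest:
            structured[key].append(rest[0])
    return dict(structured)


print(structured_classifiers(["Foo :: Bar :: Baz", "Foo :: Bar :: Qux", "Vleep"]))
# -> {'Foo': ['Bar :: Baz', 'Bar :: Qux']}
```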
| 2016-06-08T03:19:07Z | [] | [] |
pypi/warehouse | 1,329 | pypi__warehouse-1329 | [
"779"
] | 49669f76c32d4140b9f6433059ba1492f2cf388d | diff --git a/warehouse/accounts/models.py b/warehouse/accounts/models.py
--- a/warehouse/accounts/models.py
+++ b/warehouse/accounts/models.py
@@ -16,6 +16,7 @@
Boolean, DateTime, Integer, String,
)
from sqlalchemy import orm, select, sql
+from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.ext.hybrid import hybrid_property
@@ -38,7 +39,7 @@ def __getitem__(self, username):
raise KeyError from None
-class User(SitemapMixin, db.ModelBase):
+class User(SitemapMixin, db.Model):
__tablename__ = "accounts_user"
__table_args__ = (
@@ -51,7 +52,6 @@ class User(SitemapMixin, db.ModelBase):
__repr__ = make_repr("username")
- id = Column(Integer, primary_key=True, nullable=False)
username = Column(CIText, nullable=False, unique=True)
name = Column(String(length=100), nullable=False)
password = Column(String(length=128), nullable=False)
@@ -104,12 +104,8 @@ class Email(db.ModelBase):
id = Column(Integer, primary_key=True, nullable=False)
user_id = Column(
- Integer,
- ForeignKey(
- "accounts_user.id",
- deferrable=True,
- initially="DEFERRED",
- ),
+ UUID(as_uuid=True),
+ ForeignKey("accounts_user.id", deferrable=True, initially="DEFERRED"),
nullable=False,
)
email = Column(String(length=254), nullable=False)
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -12,6 +12,7 @@
import datetime
+from pyblake2 import blake2b
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPSeeOther
from pyramid.security import remember, forget
from pyramid.view import view_config
@@ -25,6 +26,9 @@
from warehouse.utils.http import is_safe_url
+USER_ID_INSECURE_COOKIE = "user_id__insecure"
+
+
@view_config(
route_name="accounts.profile",
renderer="accounts/profile.html",
@@ -91,7 +95,23 @@ def login(request, redirect_field_name=REDIRECT_FIELD_NAME,
# Now that we're logged in we'll want to redirect the user to either
# where they were trying to go originally, or to the default view.
- return HTTPSeeOther(redirect_to, headers=dict(headers))
+ resp = HTTPSeeOther(redirect_to, headers=dict(headers))
+
+ # We'll use this cookie so that client side javascript can Determine
+ # the actual user ID (not username, user ID). This is *not* a security
+ # sensitive context and it *MUST* not be used where security matters.
+ #
+ # We'll also hash this value just to avoid leaking the actual User IDs
+ # here, even though it really shouldn't matter.
+ resp.set_cookie(
+ USER_ID_INSECURE_COOKIE,
+ blake2b(
+ str(userid).encode("ascii"),
+ person=b"warehouse.userid",
+ ).hexdigest().lower(),
+ )
+
+ return resp
return {
"form": form,
@@ -141,7 +161,13 @@ def logout(request, redirect_field_name=REDIRECT_FIELD_NAME):
# Now that we're logged out we'll want to redirect the user to either
# where they were originally, or to the default view.
- return HTTPSeeOther(redirect_to, headers=dict(headers))
+ resp = HTTPSeeOther(redirect_to, headers=dict(headers))
+
+ # Ensure that we delete our user_id__insecure cookie, since the user is
+ # no longer logged in.
+ resp.delete_cookie(USER_ID_INSECURE_COOKIE)
+
+ return resp
return {"redirect": {"field": REDIRECT_FIELD_NAME, "data": redirect_to}}
@@ -213,7 +239,7 @@ def _login_user(request, userid):
request.session.update(data)
# Remember the userid using the authentication policy.
- headers = remember(request, userid)
+ headers = remember(request, str(userid))
# Cycle the CSRF token since we've crossed an authentication boundary
# and we don't want to continue using the old one.
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -142,6 +142,7 @@ def configure(settings=None):
maybe_set(settings, "camo.url", "CAMO_URL")
maybe_set(settings, "camo.key", "CAMO_KEY")
maybe_set(settings, "docs.url", "DOCS_URL")
+ maybe_set(settings, "ga.tracking_id", "GA_TRACKING_ID")
maybe_set_compound(settings, "files", "backend", "FILES_BACKEND")
maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE")
diff --git a/warehouse/csp.py b/warehouse/csp.py
--- a/warehouse/csp.py
+++ b/warehouse/csp.py
@@ -70,7 +70,7 @@ def includeme(config):
],
"referrer": ["origin-when-cross-origin"],
"reflected-xss": ["block"],
- "script-src": [SELF],
+ "script-src": [SELF, "www.google-analytics.com"],
"style-src": [SELF, "fonts.googleapis.com"],
},
})
diff --git a/warehouse/legacy/tables.py b/warehouse/legacy/tables.py
--- a/warehouse/legacy/tables.py
+++ b/warehouse/legacy/tables.py
@@ -23,6 +23,7 @@
UniqueConstraint,
Boolean, Date, DateTime, Integer, LargeBinary, String, Text,
)
+from sqlalchemy.dialects.postgresql import UUID
from warehouse import db
@@ -34,12 +35,8 @@
Column("id", Integer(), primary_key=True, nullable=False),
Column(
"user_id",
- Integer(),
- ForeignKey(
- "accounts_user.id",
- deferrable=True,
- initially="DEFERRED",
- ),
+ UUID(as_uuid=True),
+ ForeignKey("accounts_user.id", deferrable=True, initially="DEFERRED"),
nullable=False,
),
Column("key_id", CIText(), nullable=False),
diff --git a/warehouse/migrations/versions/8c8be2c0e69e_switch_to_a_uuid_based_primary_key_for_.py b/warehouse/migrations/versions/8c8be2c0e69e_switch_to_a_uuid_based_primary_key_for_.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/8c8be2c0e69e_switch_to_a_uuid_based_primary_key_for_.py
@@ -0,0 +1,113 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Switch to a UUID based primary key for User
+
+Revision ID: 8c8be2c0e69e
+Revises: 039f45e2dbf9
+Create Date: 2016-07-01 18:20:42.072664
+"""
+
+
+import sqlalchemy as sa
+
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+
+revision = "8c8be2c0e69e"
+down_revision = "039f45e2dbf9"
+
+
+def upgrade():
+ # Add a new column which is going to hold all of our new IDs for this table
+ # with a temporary name until we can rename it.
+ op.add_column(
+ "accounts_user",
+ sa.Column(
+ "new_id",
+ postgresql.UUID(as_uuid=True),
+ server_default=sa.text("gen_random_uuid()"),
+ nullable=False,
+ ),
+ )
+
+ # Add a column to tables that refer to accounts_user so they can be updated
+ # to refer to it.
+ op.add_column(
+ "accounts_email",
+ sa.Column("new_user_id", postgresql.UUID(as_uuid=True), nullable=True),
+ )
+ op.add_column(
+ "accounts_gpgkey",
+ sa.Column("new_user_id", postgresql.UUID(as_uuid=True), nullable=True),
+ )
+
+ # Update our referring tables so that their new column points to the
+ # correct user account.
+ op.execute(
+ """ UPDATE accounts_email
+ SET new_user_id = accounts_user.new_id
+ FROM accounts_user
+ WHERE accounts_email.user_id = accounts_user.id
+ """
+ )
+ op.execute(
+ """ UPDATE accounts_gpgkey
+ SET new_user_id = accounts_user.new_id
+ FROM accounts_user
+ WHERE accounts_gpgkey.user_id = accounts_user.id
+ """
+ )
+
+ # Disallow any NULL values in our referring tables
+ op.alter_column("accounts_email", "new_user_id", nullable=False)
+ op.alter_column("accounts_gpgkey", "new_user_id", nullable=False)
+
+ # Delete our existing fields and move our new fields into their old places.
+ op.drop_constraint("accounts_email_user_id_fkey", "accounts_email")
+ op.drop_column("accounts_email", "user_id")
+ op.alter_column("accounts_email", "new_user_id", new_column_name="user_id")
+
+ op.drop_constraint("accounts_gpgkey_user_id_fkey", "accounts_gpgkey")
+ op.drop_column("accounts_gpgkey", "user_id")
+ op.alter_column(
+ "accounts_gpgkey", "new_user_id", new_column_name="user_id")
+
+ # Switch the primary key from the old to the new field, drop the old name,
+ # and rename the new field into it's place.
+ op.drop_constraint("accounts_user_pkey", "accounts_user")
+ op.create_primary_key(None, "accounts_user", ["new_id"])
+ op.drop_column("accounts_user", "id")
+ op.alter_column("accounts_user", "new_id", new_column_name="id")
+
+ # Finally, Setup our foreign key constraints for our referring tables.
+ op.create_foreign_key(
+ None,
+ "accounts_email",
+ "accounts_user",
+ ["user_id"],
+ ["id"],
+ deferrable=True,
+ )
+ op.create_foreign_key(
+ None,
+ "accounts_gpgkey",
+ "accounts_user",
+ ["user_id"],
+ ["id"],
+ deferrable=True,
+ )
+
+
+def downgrade():
+ raise RuntimeError("Order No. 227 - Ни шагу назад!")
| diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py
--- a/tests/unit/accounts/test_services.py
+++ b/tests/unit/accounts/test_services.py
@@ -10,6 +10,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import uuid
+
import pretend
from zope.interface.verify import verifyClass
@@ -60,7 +62,7 @@ def test_find_userid_existing_user(self, db_session):
def test_check_password_nonexistant_user(self, db_session):
service = services.DatabaseUserService(db_session)
- assert not service.check_password(1, None)
+ assert not service.check_password(uuid.uuid4(), None)
def test_check_password_invalid(self, db_session):
user = UserFactory.create()
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py
--- a/tests/unit/accounts/test_views.py
+++ b/tests/unit/accounts/test_views.py
@@ -11,6 +11,7 @@
# limitations under the License.
import datetime
+import uuid
import freezegun
import pretend
@@ -118,8 +119,9 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
new_session = {}
+ user_id = uuid.uuid4()
user_service = pretend.stub(
- find_userid=pretend.call_recorder(lambda username: 1),
+ find_userid=pretend.call_recorder(lambda username: user_id),
update_user=pretend.call_recorder(lambda *a, **kw: None),
)
pyramid_request.find_service = pretend.call_recorder(
@@ -134,7 +136,7 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
)
pyramid_request.set_property(
- lambda r: 1234 if with_user else None,
+ lambda r: str(uuid.uuid4()) if with_user else None,
name="unauthenticated_userid",
)
@@ -161,7 +163,7 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
assert user_service.find_userid.calls == [pretend.call("theuser")]
assert user_service.update_user.calls == [
- pretend.call(1, last_login=now),
+ pretend.call(user_id, last_login=now),
]
if with_user:
@@ -169,7 +171,7 @@ def test_post_validate_redirects(self, monkeypatch, pyramid_request,
else:
assert new_session == {"a": "b", "foo": "bar"}
- assert remember.calls == [pretend.call(pyramid_request, 1)]
+ assert remember.calls == [pretend.call(pyramid_request, str(user_id))]
assert pyramid_request.session.invalidate.calls == [pretend.call()]
assert pyramid_request.find_service.calls == [
pretend.call(IUserService, context=None),
diff --git a/tests/unit/test_csp.py b/tests/unit/test_csp.py
--- a/tests/unit/test_csp.py
+++ b/tests/unit/test_csp.py
@@ -205,7 +205,7 @@ def test_includeme():
],
"referrer": ["origin-when-cross-origin"],
"reflected-xss": ["block"],
- "script-src": ["'self'"],
+ "script-src": ["'self'", "www.google-analytics.com"],
"style-src": ["'self'", "fonts.googleapis.com"],
},
})
| Add Browser Analytics
We'll want to add Google Analytics or something so that we can see traffic and browsers and such. This will most likely largely be done in `pypi-theme`.
| Surely we want this to be active on launch?
Yea we want this active on launch, but it's trivial to do so it shouldn't be hard; it'll just be a matter of dropping a bit of JS into the template. The only hard part is our CSP policy.
I asked for some clarification in IRC in `#pypa-dev` from Donald just now, and learned:
We already have metrics on legacy PyPI, so it's important to have analytics active in Warehouse at launch, so we don't miss data during the switch.
To abide by the content security policy, we need to figure out how to do GA without embedding inline JS; our CSP policy instructs browsers to reject inline JavaScript in the HTML, only [allowing] linked scripts. In the worst case, we could relax the restrictions, but Donald would rather not do that.
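To illustrate why adding `www.google-analytics.com` to `script-src` (the `csp.py` change above) is enough for a linked GA script: the policy dict ultimately becomes a header like the one below. The serializer here is a simplified assumption, not Warehouse's actual CSP tween.
```
def serialize_csp(policy):
    # Simplified: each directive becomes "name value value ...", joined by "; ".
    return "; ".join(
        " ".join([directive] + values) if values else directive
        for directive, values in sorted(policy.items())
    )


policy = {"script-src": ["'self'", "www.google-analytics.com"]}
print(serialize_csp(policy))
# -> script-src 'self' www.google-analytics.com
```
With that directive in place the browser still rejects inline `<script>` blocks but will load the analytics script from the whitelisted domain.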
| 2016-07-01T19:47:42Z | [] | [] |
pypi/warehouse | 1,335 | pypi__warehouse-1335 | [
"836"
] | 5cca1927dc50a2ba0c5306677dfac49d5e60e0c9 | diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py
--- a/warehouse/i18n/__init__.py
+++ b/warehouse/i18n/__init__.py
@@ -35,7 +35,3 @@ def includeme(config):
"format_rfc822_datetime",
"warehouse.i18n.filters:format_rfc822_datetime",
)
-
- # Register our utility functions with Jinja2
- jglobals = config.get_settings().setdefault("jinja2.globals", {})
- jglobals.setdefault("l20n", "warehouse.i18n.l20n:l20n")
diff --git a/warehouse/i18n/l20n.py b/warehouse/i18n/l20n.py
deleted file mode 100644
--- a/warehouse/i18n/l20n.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import jinja2
-
-from markupsafe import Markup as M # noqa
-
-from warehouse.filters import tojson
-
-
-_L20N_TEMPLATE = jinja2.Template(
- 'data-l10n-id="{{ tid }}"'
- '{% if data %} data-l10n-args="{{ data }}"{% endif %}',
- autoescape=True,
-)
-
-
-def l20n(tid, **kwargs):
- data = tojson(kwargs) if kwargs else None
- return M(_L20N_TEMPLATE.render(tid=tid, data=data))
| diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py
--- a/tests/unit/i18n/test_init.py
+++ b/tests/unit/i18n/test_init.py
@@ -46,7 +46,4 @@ def test_includeme():
"format_rfc822_datetime":
"warehouse.i18n.filters:format_rfc822_datetime",
},
- "jinja2.globals": {
- "l20n": "warehouse.i18n.l20n:l20n",
- },
}
diff --git a/tests/unit/i18n/test_l20n.py b/tests/unit/i18n/test_l20n.py
deleted file mode 100644
--- a/tests/unit/i18n/test_l20n.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from warehouse.i18n import l20n
-
-
-@pytest.mark.parametrize(
- ("tid", "args", "expected"),
- [
- ("foo", {}, 'data-l10n-id="foo"'),
- (
- "bar",
- {"thing": "other"},
- 'data-l10n-id="bar" '
- 'data-l10n-args="{"thing":"other"}"',
- ),
- ],
-)
-def test_l20n(tid, args, expected):
- assert l20n.l20n(tid, **args) == expected
| Handle Translation of the <title> tag
Currently we can't really translate the `<title>` tag because our block doesn't allow a template to add a l20n ID to it.
Gonna drop this out of the Become PyPI; since we're not going to have any translations for launch, we can figure this out later.
| 2016-07-01T23:18:26Z | [] | [] |
pypi/warehouse | 1,344 | pypi__warehouse-1344 | [
"604"
] | ddf741e3b2226a42cf29bdeda4456767a7ed1efe | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -143,6 +143,7 @@ def configure(settings=None):
maybe_set(settings, "camo.key", "CAMO_KEY")
maybe_set(settings, "docs.url", "DOCS_URL")
maybe_set(settings, "ga.tracking_id", "GA_TRACKING_ID")
+ maybe_set(settings, "statuspage.url", "STATUSPAGE_URL")
maybe_set_compound(settings, "files", "backend", "FILES_BACKEND")
maybe_set_compound(settings, "origin_cache", "backend", "ORIGIN_CACHE")
diff --git a/warehouse/csp.py b/warehouse/csp.py
--- a/warehouse/csp.py
+++ b/warehouse/csp.py
@@ -57,7 +57,7 @@ def includeme(config):
"csp": {
"base-uri": [SELF],
"block-all-mixed-content": [],
- "connect-src": [SELF],
+ "connect-src": [SELF, config.registry.settings["statuspage.url"]],
"default-src": [NONE],
"font-src": [SELF, "fonts.gstatic.com"],
"form-action": [SELF],
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -140,6 +140,7 @@ def app_config(database):
"files.url": "http://localhost:7000/",
"sessions.secret": "123456",
"sessions.url": "redis://localhost:0/",
+ "statuspage.url": "https://2p66nmmycsj3.statuspage.io",
},
)
diff --git a/tests/unit/test_csp.py b/tests/unit/test_csp.py
--- a/tests/unit/test_csp.py
+++ b/tests/unit/test_csp.py
@@ -175,7 +175,10 @@ def test_includeme():
lambda fact, name: None),
add_settings=pretend.call_recorder(lambda settings: None),
add_tween=pretend.call_recorder(lambda tween: None),
- registry=pretend.stub(settings={"camo.url": "camo.url.value"}),
+ registry=pretend.stub(settings={
+ "camo.url": "camo.url.value",
+ "statuspage.url": "https://2p66nmmycsj3.statuspage.io",
+ }),
)
csp.includeme(config)
@@ -192,7 +195,10 @@ def test_includeme():
"csp": {
"base-uri": ["'self'"],
"block-all-mixed-content": [],
- "connect-src": ["'self'"],
+ "connect-src": [
+ "'self'",
+ "https://2p66nmmycsj3.statuspage.io",
+ ],
"default-src": ["'none'"],
"font-src": ["'self'", "fonts.gstatic.com"],
"form-action": ["'self'"],
| Integrate statuspage.io
We'll want to integrate statuspage.io so that people can get a link to the status.python.org incidents when one is occurring. They have a web API that we are currently using via JavaScript on pypi.python.org.
See: https://status-page-blog.herokuapp.com/embed-system-status-within-your-webapp
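The embed works by having the page's JavaScript poll the Statuspage API from the browser, which is why the patch adds the configured Statuspage URL to `connect-src`. A rough server-side equivalent of that call, using the page ID from the test settings above and Statuspage's documented status endpoint:
```
import requests

resp = requests.get("https://2p66nmmycsj3.statuspage.io/api/v2/status.json")
print(resp.json()["status"]["description"])  # e.g. "All Systems Operational"
```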
| Design specification:
![screenshot from 2016-06-30 06-52-09](https://cloud.githubusercontent.com/assets/3323703/16478074/36753672-3e8f-11e6-8a1b-7beefb0ea379.png)
| 2016-07-02T19:33:47Z | [] | [] |
pypi/warehouse | 1,352 | pypi__warehouse-1352 | [
"1347",
"1347"
] | 98d3b39db10005a936d8e56ab059d90fac436f79 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -218,6 +218,10 @@ def configure(settings=None):
filters.setdefault("readme", "warehouse.filters:readme")
filters.setdefault("shorten_number", "warehouse.filters:shorten_number")
filters.setdefault("urlparse", "warehouse.filters:urlparse")
+ filters.setdefault(
+ "contains_valid_uris",
+ "warehouse.filters:contains_valid_uris"
+ )
# We also want to register some global functions for Jinja
jglobals = config.get_settings().setdefault("jinja2.globals", {})
diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -27,6 +27,8 @@
from pyramid.threadlocal import get_current_request
+from warehouse.utils.http import is_valid_uri
+
def _camo_url(camo_url, camo_key, url):
camo_key = camo_key.encode("utf8")
@@ -136,3 +138,10 @@ def format_classifiers(classifiers):
structured = collections.OrderedDict(sorted(structured.items()))
return structured
+
+
+def contains_valid_uris(items):
+ """Returns boolean representing whether the input list contains any valid
+ URIs
+ """
+ return any(is_valid_uri(i) for i in items)
| diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py
--- a/tests/functional/test_templates.py
+++ b/tests/functional/test_templates.py
@@ -39,6 +39,7 @@ def test_templates_for_empty_titles():
"readme": "warehouse.filters:readme",
"shorten_number": "warehouse.filters:shorten_number",
"urlparse": "warehouse.filters:urlparse",
+ "contains_valid_uris": "warehouse.filters:contains_valid_uris"
})
for dir_, _, files in os.walk(dir_name):
diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -207,3 +207,24 @@ def test_format_tags(inp, expected):
)
def test_format_classifiers(inp, expected):
assert list(filters.format_classifiers(inp).items()) == expected
+
+
+@pytest.mark.parametrize(
+ ("inp", "expected"),
+ [
+ (
+ ["abcdef", "ghijkl"],
+ False
+ ),
+ (
+ ["https://github.com/example/test", "https://pypi.io/"],
+ True
+ ),
+ (
+ ["abcdef", "https://github.com/example/test"],
+ True
+ )
+ ]
+)
+def test_contains_valid_uris(inp, expected):
+ assert filters.contains_valid_uris(inp) == expected
| Do not show links bar if no links are present on the project
We want to avoid this situation (see second grey - empty - div):
![screenshot from 2016-07-02 21-32-31](https://cloud.githubusercontent.com/assets/3323703/16542367/941c45f4-409c-11e6-8813-e006537601ac.png)
This should be as simple as wrapping the div in an if statement checking that links exist.
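The patch approaches that if statement by adding a `contains_valid_uris` Jinja filter. A standalone sketch of the check, with a simple scheme-plus-host test standing in for Warehouse's `is_valid_uri` helper:
```
from urllib.parse import urlparse


def is_valid_uri(value):
    # Stand-in for warehouse.utils.http.is_valid_uri: require a scheme and host.
    parsed = urlparse(value)
    return bool(parsed.scheme and parsed.netloc)


def contains_valid_uris(items):
    # True if at least one entry looks like a real link, i.e. the links
    # sidebar is worth rendering at all.
    return any(is_valid_uri(i) for i in items)


print(contains_valid_uris(["UNKNOWN", "https://github.com/example/test"]))  # True
print(contains_valid_uris(["abcdef", "ghijkl"]))                            # False
```
A template can then wrap the links div in something like `{% if urls.values() | contains_valid_uris %}` (the exact variable name is an assumption here).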
| 2016-07-03T00:16:20Z | [] | [] |
|
pypi/warehouse | 1,407 | pypi__warehouse-1407 | [
"1386"
] | 7acd3cb48da3485e88814979c0ae8eae785add7b | diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -22,13 +22,16 @@
from elasticsearch_dsl import Q
from sqlalchemy import func
from sqlalchemy.orm import aliased, joinedload
+from sqlalchemy.sql import exists
from warehouse.accounts import REDIRECT_FIELD_NAME
from warehouse.accounts.models import User
from warehouse.cache.origin import origin_cache
from warehouse.cache.http import cache_control
from warehouse.classifiers.models import Classifier
-from warehouse.packaging.models import Project, Release, File
+from warehouse.packaging.models import (
+ Project, Release, File, release_classifiers,
+)
from warehouse.utils.row_counter import RowCount
from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory
@@ -218,7 +221,17 @@ def search(request):
available_filters = collections.defaultdict(list)
- for cls in request.db.query(Classifier).order_by(Classifier.classifier):
+ classifiers_q = (
+ request.db.query(Classifier)
+ .with_entities(Classifier.classifier)
+ .filter(
+ exists([release_classifiers.c.trove_id])
+ .where(release_classifiers.c.trove_id == Classifier.id)
+ )
+ .order_by(Classifier.classifier)
+ )
+
+ for cls in classifiers_q:
first, *_ = cls.classifier.split(' :: ')
available_filters[first].append(cls.classifier)
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -293,6 +293,12 @@ def test_with_classifiers(self, monkeypatch, db_request, page):
classifier2 = ClassifierFactory.create(classifier="foo :: baz")
classifier3 = ClassifierFactory.create(classifier="fiz :: buz")
+ project = ProjectFactory.create()
+ release1 = ReleaseFactory.create(project=project)
+ release1.created = datetime.date(2011, 1, 1)
+ release1._classifiers.append(classifier1)
+ release1._classifiers.append(classifier2)
+
page_obj = pretend.stub(page_count=(page or 1) + 10)
page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
monkeypatch.setattr(views, "ElasticsearchPage", page_cls)
@@ -301,19 +307,24 @@ def test_with_classifiers(self, monkeypatch, db_request, page):
url_maker_factory = pretend.call_recorder(lambda request: url_maker)
monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)
- assert search(db_request) == {
+ search_view = search(db_request)
+ assert search_view == {
"page": page_obj,
"term": params.get("q", ''),
"order": params.get("o", ''),
"applied_filters": params.getall("c"),
"available_filters": [
- ('fiz', [classifier3.classifier]),
('foo', [
classifier1.classifier,
classifier2.classifier,
])
],
}
+ assert (
+ ("fiz", [
+ classifier3.classifier
+ ]) not in search_view["available_filters"]
+ )
assert page_cls.calls == [
pretend.call(es_query, url_maker=url_maker, page=page or 1),
]
| Remove unused classifiers from filter list
We currently show all trove classifiers in the search filter panel, despite the fact that some are not applied to any projects in the DB.
It would be better to only show those classifiers that are actually applied to projects, so we avoid filtering by a classifier and returning an empty result.
| 2016-07-24T12:24:31Z | [] | [] |
|
pypi/warehouse | 1,429 | pypi__warehouse-1429 | [
"1399"
] | dabbdfe0ac3e32cb3a27f013d4e8886001b3806f | diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -24,6 +24,7 @@ def includeme(config):
# Basic global routes
config.add_route("index", "/", domain=warehouse)
config.add_route("robots.txt", "/robots.txt", domain=warehouse)
+ config.add_route("opensearch.xml", "/opensearch.xml", domain=warehouse)
config.add_route("index.sitemap.xml", "/sitemap.xml", domain=warehouse)
config.add_route(
"bucket.sitemap.xml",
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -100,6 +100,23 @@ def robotstxt(request):
return {}
+@view_config(
+ route_name="opensearch.xml",
+ renderer="opensearch.xml",
+ decorator=[
+ cache_control(1 * 24 * 60 * 60), # 1 day
+ origin_cache(
+ 1 * 24 * 60 * 60, # 1 day
+ stale_while_revalidate=6 * 60 * 60, # 6 hours
+ stale_if_error=1 * 24 * 60 * 60, # 1 day
+ )
+ ]
+)
+def opensearchxml(request):
+ request.response.content_type = "text/xml"
+ return {}
+
+
@view_config(
route_name="index",
renderer="index.html",
| diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -70,6 +70,7 @@ def add_xmlrpc_endpoint(endpoint, pattern, header, domain=None):
pretend.call("health", "/_health/"),
pretend.call('index', '/', domain=warehouse),
pretend.call("robots.txt", "/robots.txt", domain=warehouse),
+ pretend.call("opensearch.xml", "/opensearch.xml", domain=warehouse),
pretend.call("index.sitemap.xml", "/sitemap.xml", domain=warehouse),
pretend.call(
"bucket.sitemap.xml",
diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -24,7 +24,7 @@
from warehouse import views
from warehouse.views import (
SEARCH_BOOSTS, SEARCH_FIELDS, current_user_indicator, forbidden, health,
- httpexception_view, index, robotstxt, search
+ httpexception_view, index, robotstxt, opensearchxml, search
)
from ..common.db.accounts import UserFactory
@@ -69,6 +69,11 @@ def test_robotstxt(pyramid_request):
assert pyramid_request.response.content_type == "text/plain"
+def test_opensearchxml(pyramid_request):
+ assert opensearchxml(pyramid_request) == {}
+ assert pyramid_request.response.content_type == "text/xml"
+
+
class TestIndex:
def test_index(self, db_request):
| Add support for opensearch
http://www.opensearch.org/
Lets you (at least in Chrome) type `pypi.io`, hit tab, and then type what you're searching for in the address bar instead of having to go to the site first to use the search box.
| 2016-08-16T06:57:39Z | [] | [] |
|
pypi/warehouse | 1,454 | pypi__warehouse-1454 | [
"1382"
] | f020a325608427302442998129c642c5e33eb789 | diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -342,7 +342,9 @@ def urls(self):
@property
def has_meta(self):
- return any([self.keywords])
+ return any([self.keywords,
+ self.author, self.author_email,
+ self.maintainer, self.maintainer_email])
class File(db.Model):
| diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py
--- a/tests/unit/packaging/test_models.py
+++ b/tests/unit/packaging/test_models.py
@@ -107,6 +107,20 @@ def test_has_meta_true_with_keywords(self, db_session):
release = DBReleaseFactory.create(keywords="foo, bar")
assert release.has_meta
+ def test_has_meta_true_with_author(self, db_session):
+ release = DBReleaseFactory.create(author="Batman")
+ assert release.has_meta
+
+ release = DBReleaseFactory.create(author_email="wayne@gotham.ny")
+ assert release.has_meta
+
+ def test_has_meta_true_with_maintainer(self, db_session):
+ release = DBReleaseFactory.create(maintainer="Spiderman")
+ assert release.has_meta
+
+ release = DBReleaseFactory.create(maintainer_email="peter@parker.mrvl")
+ assert release.has_meta
+
def test_has_meta_false(self, db_session):
release = DBReleaseFactory.create()
assert not release.has_meta
| [WIP] Show Author and Maintainer in Project detail view
Refs #1060
@dstufft I need some input regarding the implementation of email validation. How do you want to have this implemented? Should I use a self-written primitive regex? Or a package like https://pypi.python.org/pypi/validate_email ? Something else?
| 2016-09-11T06:56:15Z | [] | [] |
|
pypi/warehouse | 1,471 | pypi__warehouse-1471 | [
"1224",
"1226"
] | b7a2168c758c1350ec637f5171d9c2de6eb91942 | diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py
--- a/warehouse/cli/search/reindex.py
+++ b/warehouse/cli/search/reindex.py
@@ -57,6 +57,8 @@ def reindex(config, **kwargs):
"""
client = config.registry["elasticsearch.client"]
db = Session(bind=config.registry["sqlalchemy.engine"])
+ number_of_replicas = config.registry.get("elasticsearch.replicas", 0)
+ refresh_interval = config.registry.get("elasticsearch.interval", "1s")
# We use a randomly named index so that we can do a zero downtime reindex.
# Essentially we'll use a randomly named index which we will use until all
@@ -68,12 +70,16 @@ def reindex(config, **kwargs):
random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
new_index_name = "{}-{}".format(index_base, random_token)
doc_types = config.registry.get("search.doc_types", set())
+
+ # Create the new index with zero replicas and index refreshes disabled
+ # while we are bulk indexing.
new_index = get_index(
new_index_name,
doc_types,
using=client,
shards=config.registry.get("elasticsearch.shards", 1),
- replicas=config.registry.get("elasticsearch.replicas", 0),
+ replicas=0,
+ interval="-1",
)
new_index.create()
@@ -91,8 +97,20 @@ def reindex(config, **kwargs):
db.rollback()
db.close()
- # Now that we've finished indexing all of our data, we'll point the alias
- # at our new randomly named index and delete the old index.
+ # Now that we've finished indexing all of our data we can optimize it and
+ # update the replicas and refresh intervals.
+ client.indices.forcemerge(index=new_index_name)
+ client.indices.put_settings(
+ index=new_index_name,
+ body={
+ "index": {
+ "number_of_replicas": number_of_replicas,
+ "refresh_interval": refresh_interval,
+ }
+ }
+ )
+
+ # Point the alias at our new randomly named index and delete the old index.
if client.indices.exists_alias(name=index_base):
to_delete = set()
actions = []
diff --git a/warehouse/search.py b/warehouse/search.py
--- a/warehouse/search.py
+++ b/warehouse/search.py
@@ -29,11 +29,15 @@ def callback(scanner, _name, item):
return cls
-def get_index(name, doc_types, *, using, shards=1, replicas=0):
+def get_index(name, doc_types, *, using, shards=1, replicas=0, interval="1s"):
index = Index(name, using=using)
for doc_type in doc_types:
index.doc_type(doc_type)
- index.settings(number_of_shards=shards, number_of_replicas=replicas)
+ index.settings(
+ number_of_shards=shards,
+ number_of_replicas=replicas,
+ refresh_interval=interval,
+ )
return index
@@ -58,6 +62,8 @@ def includeme(config):
[urllib.parse.urlunparse(p[:2] + ("",) * 4)],
verify_certs=True,
ca_certs=certifi.where(),
+ timeout=30,
+ retry_on_timeout=True,
)
config.registry["elasticsearch.index"] = p.path.strip("/")
config.registry["elasticsearch.shards"] = int(qs.get("shards", ["1"])[0])
| diff --git a/tests/unit/cli/search/test_reindex.py b/tests/unit/cli/search/test_reindex.py
--- a/tests/unit/cli/search/test_reindex.py
+++ b/tests/unit/cli/search/test_reindex.py
@@ -54,6 +54,9 @@ def __init__(self):
self.indices = {}
self.aliases = {}
+ self.put_settings = pretend.call_recorder(lambda *a, **kw: None)
+ self.forcemerge = pretend.call_recorder(lambda *a, **kw: None)
+
def create(self, index, body):
self.indices[index] = body
@@ -146,6 +149,8 @@ def parallel_bulk(client, iterable):
assert sess_obj.rollback.calls == [pretend.call()]
assert sess_obj.close.calls == [pretend.call()]
assert es_client.indices.indices == {}
+ assert es_client.indices.put_settings.calls == []
+ assert es_client.indices.forcemerge.calls == []
def test_successfully_indexes_and_adds_new(self, monkeypatch, cli):
sess_obj = pretend.stub(
@@ -198,6 +203,20 @@ def project_docs(db):
assert es_client.indices.aliases == {
"warehouse": ["warehouse-cbcbcbcbcb"],
}
+ assert es_client.indices.put_settings.calls == [
+ pretend.call(
+ index='warehouse-cbcbcbcbcb',
+ body={
+ 'index': {
+ 'number_of_replicas': 0,
+ 'refresh_interval': '1s',
+ },
+ },
+ )
+ ]
+ assert es_client.indices.forcemerge.calls == [
+ pretend.call(index='warehouse-cbcbcbcbcb')
+ ]
def test_successfully_indexes_and_replaces(self, monkeypatch, cli):
sess_obj = pretend.stub(
@@ -252,3 +271,17 @@ def project_docs(db):
assert es_client.indices.aliases == {
"warehouse": ["warehouse-cbcbcbcbcb"],
}
+ assert es_client.indices.put_settings.calls == [
+ pretend.call(
+ index='warehouse-cbcbcbcbcb',
+ body={
+ 'index': {
+ 'number_of_replicas': 0,
+ 'refresh_interval': '1s',
+ },
+ },
+ )
+ ]
+ assert es_client.indices.forcemerge.calls == [
+ pretend.call(index='warehouse-cbcbcbcbcb')
+ ]
diff --git a/tests/unit/test_search.py b/tests/unit/test_search.py
--- a/tests/unit/test_search.py
+++ b/tests/unit/test_search.py
@@ -42,6 +42,10 @@ def test_es(monkeypatch):
assert index_cls.calls == [pretend.call("warehouse", using=client)]
assert index_obj.doc_type.calls == [pretend.call(d) for d in doc_types]
assert index_obj.settings.calls == [
- pretend.call(number_of_shards=1, number_of_replicas=0),
+ pretend.call(
+ number_of_shards=1,
+ number_of_replicas=0,
+ refresh_interval="1s",
+ )
]
assert index_obj.search.calls == [pretend.call()]
| Optimize reindexing
It's possible to tweak the index settings before and after a full reindex to speed up reindexing (a sketch of the calls follows the lists below).
Things you can do before reindexing:
- Set `num_replicas` to `0`. During reindexing this will stop the network traffic of Elasticsearch replicating your data across the nodes. This can happen in bulk when you set `num_replicas` back to a value greater than `0` after reindexing is finished.
- Set `refresh_interval` to `-1`. I.e. disable index refreshes completely during bulk indexing. (See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html#bulk)
Things you can do after reindexing:
- Perform a `force_merge` on the index. The defaults should be fine. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-forcemerge.html
- Set the `num_replicas` back to its default value.
- Set the `refresh_interval` back to its default value.
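A minimal sketch of those tweaks with the elasticsearch-py client; the host and index name are assumptions, and Warehouse's `reindex` command does the equivalent against its randomly named index:
```
from elasticsearch import Elasticsearch

client = Elasticsearch(["http://localhost:9200"])
index = "warehouse"  # illustrative index name

# Before bulk indexing: no replicas, no periodic refreshes.
client.indices.put_settings(
    index=index,
    body={"index": {"number_of_replicas": 0, "refresh_interval": "-1"}},
)

# ... run the bulk indexing here ...

# After bulk indexing: merge segments, then restore replicas and refreshes.
client.indices.forcemerge(index=index)
client.indices.put_settings(
    index=index,
    body={"index": {"number_of_replicas": 1, "refresh_interval": "1s"}},
)
```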
[WIP] Add some optimizations to reindex
WORK IN PROGRESS
I consider the code to be relatively complete (besides the tests), but local testing is revealing a lot of timeout errors connecting to the Elasticsearch cluster. I don't know if a change here causes this or something else.
| Attempted fix in https://github.com/pypa/warehouse/pull/1226
Just gonna say that the general idea and the changes look good to me, once you work out the issues :]
| 2016-10-06T16:40:05Z | [] | [] |
pypi/warehouse | 1,485 | pypi__warehouse-1485 | [
"1483"
] | be37f23776af28894727e3a1ec4a939706983879 | diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py
--- a/warehouse/accounts/forms.py
+++ b/warehouse/accounts/forms.py
@@ -11,6 +11,7 @@
# limitations under the License.
import re
+import disposable_email_domains
import wtforms
import wtforms.fields.html5
@@ -84,6 +85,9 @@ def validate_username(self, field):
def validate_email(self, field):
if self.user_service.find_userid_by_email(field.data) is not None:
raise wtforms.validators.ValidationError("Email exists.")
+ domain = field.data.split('@')[-1]
+ if domain in disposable_email_domains.blacklist:
+ raise wtforms.validators.ValidationError("Disposable email.")
def validate_g_recaptcha_response(self, field):
# do required data validation here due to enabled flag being required
| diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py
--- a/tests/unit/accounts/test_forms.py
+++ b/tests/unit/accounts/test_forms.py
@@ -204,6 +204,18 @@ def test_email_exists_error(self):
assert not form.validate()
assert form.email.errors.pop() == "Email exists."
+ def test_blacklisted_email_error(self):
+ form = forms.RegistrationForm(
+ data={"email": "foo@bearsarefuzzy.com"},
+ user_service=pretend.stub(
+ find_userid_by_email=pretend.call_recorder(lambda _: None),
+ ),
+ recaptcha_service=pretend.stub(enabled=True),
+ )
+
+ assert not form.validate()
+ assert form.email.errors.pop() == "Disposable email."
+
def test_recaptcha_disabled(self):
form = forms.RegistrationForm(
data={"g_recpatcha_response": ""},
| Refuse account creation when using disposable email addresses.
Long term, it makes no sense to accept accounts that use a disposable email address for managing legit packages. Short/near term, it opens an easy door for spammers to create accounts on PyPI.
I've implemented blacklisting for account signup and email swaps using the blacklist at https://github.com/martenson/disposable-email-domains for legacy PyPI.
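The Warehouse-side check added by the patch boils down to this sketch, using the `disposable_email_domains` package's `blacklist` set (the example address is the one from the test above):
```
import disposable_email_domains


def is_disposable(email):
    # Compare only the domain part against the curated blacklist.
    domain = email.split("@")[-1]
    return domain in disposable_email_domains.blacklist


print(is_disposable("foo@bearsarefuzzy.com"))  # True: domain is blacklisted
print(is_disposable("foo@example.org"))        # False
```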
| 2016-10-19T22:58:33Z | [] | [] |
|
pypi/warehouse | 1,519 | pypi__warehouse-1519 | [
"1503"
] | 8cd6ddb4ddd8d19c5c728817e3016bea1fc3c382 | diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py
--- a/warehouse/cli/search/reindex.py
+++ b/warehouse/cli/search/reindex.py
@@ -38,7 +38,7 @@ def _project_docs(db):
(joinedload(Release.project)
.load_only("normalized_name", "name")
.joinedload(Project.releases)
- .load_only("version")),
+ .load_only("version", "is_prerelease")),
joinedload(Release._classifiers).load_only("classifier"))
.distinct(Release.name)
.order_by(Release.name, Release._pypi_ordering.desc())
diff --git a/warehouse/migrations/versions/e7b09b5c089d_add_pep440_is_prerelease.py b/warehouse/migrations/versions/e7b09b5c089d_add_pep440_is_prerelease.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/e7b09b5c089d_add_pep440_is_prerelease.py
@@ -0,0 +1,39 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Add pep440_is_prerelease
+
+Revision ID: e7b09b5c089d
+Revises: be4cf6b58557
+Create Date: 2016-12-03 15:04:40.251609
+"""
+
+from alembic import op
+
+
+revision = "e7b09b5c089d"
+down_revision = "be4cf6b58557"
+
+
+def upgrade():
+ op.execute("""
+ CREATE FUNCTION pep440_is_prerelease(text) returns boolean as $$
+ SELECT lower($1) ~* '(a|b|rc|dev|alpha|beta|c|pre|preview)'
+ $$
+ LANGUAGE SQL
+ IMMUTABLE
+ RETURNS NULL ON NULL INPUT;
+ """)
+
+
+def downgrade():
+ op.execute("DROP FUNCTION pep440_is_prerelease")
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -230,6 +230,7 @@ def __table_args__(cls): # noqa
primary_key=True,
)
version = Column(Text, primary_key=True)
+ is_prerelease = orm.column_property(func.pep440_is_prerelease(version))
author = Column(Text)
author_email = Column(Text)
maintainer = Column(Text)
diff --git a/warehouse/packaging/search.py b/warehouse/packaging/search.py
--- a/warehouse/packaging/search.py
+++ b/warehouse/packaging/search.py
@@ -11,6 +11,8 @@
# limitations under the License.
from elasticsearch_dsl import DocType, Text, Keyword, analyzer, MetaField, Date
+from first import first
+from packaging.version import parse as parse_version
from warehouse.search import doc_type
@@ -34,6 +36,7 @@ class Project(DocType):
name = Text()
normalized_name = Text(analyzer=NameAnalyzer, index_options="docs")
version = Keyword(multi=True)
+ latest_version = Keyword()
summary = Text(analyzer="snowball")
description = Text(analyzer="snowball")
author = Text()
@@ -57,7 +60,23 @@ def from_db(cls, release):
obj = cls(meta={"id": release.project.normalized_name})
obj["name"] = release.project.name
obj["normalized_name"] = release.project.normalized_name
- obj["version"] = [r.version for r in release.project.releases]
+ obj["version"] = [
+ r.version
+ for r in sorted(
+ release.project.releases,
+ key=lambda r: parse_version(r.version),
+ reverse=True,
+ )
+ ]
+ obj["latest_version"] = first(
+ sorted(
+ release.project.releases,
+ key=lambda r: parse_version(r.version),
+ reverse=True,
+ ),
+ key=lambda r: not r.is_prerelease,
+ default=release.project.releases[0],
+ ).version
obj["summary"] = release.summary
obj["description"] = release.description
obj["author"] = release.author
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -10,6 +10,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from first import first
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.view import view_config
from sqlalchemy.orm.exc import NoResultFound
@@ -40,7 +41,9 @@ def project_detail(project, request):
release = (
request.db.query(Release)
.filter(Release.project == project)
- .order_by(Release._pypi_ordering.desc())
+ .order_by(
+ Release.is_prerelease.nullslast(),
+ Release._pypi_ordering.desc())
.limit(1)
.one()
)
@@ -74,11 +77,22 @@ def release_detail(release, request):
all_releases = (
request.db.query(Release)
.filter(Release.project == project)
- .with_entities(Release.version, Release.created)
+ .with_entities(
+ Release.version,
+ Release.is_prerelease,
+ Release.created)
.order_by(Release._pypi_ordering.desc())
.all()
)
+ # Get the latest non-prerelease version of this Project, or the latest
+ # of any version if there are no non-prerelease versions.
+ latest_release = first(
+ all_releases,
+ key=lambda r: not r.is_prerelease,
+ default=all_releases[0],
+ )
+
# Get all of the maintainers for this project.
maintainers = [
r.user
@@ -107,6 +121,7 @@ def release_detail(release, request):
"project": project,
"release": release,
"files": release.files.all(),
+ "latest_release": latest_release,
"all_releases": all_releases,
"maintainers": maintainers,
"license": license,
| diff --git a/tests/unit/cli/search/test_reindex.py b/tests/unit/cli/search/test_reindex.py
--- a/tests/unit/cli/search/test_reindex.py
+++ b/tests/unit/cli/search/test_reindex.py
@@ -15,6 +15,8 @@
import packaging.version
import pretend
+from first import first
+
import warehouse.cli.search.reindex
from warehouse.cli.search.reindex import reindex, _project_docs
@@ -42,6 +44,10 @@ def test_project_docs(db_session):
"name": p.name,
"normalized_name": p.normalized_name,
"version": [r.version for r in prs],
+ "latest_version": first(
+ prs,
+ key=lambda r: not r.is_prerelease,
+ ).version,
},
}
for p, prs in sorted(releases.items(), key=lambda x: x[0].name.lower())
diff --git a/tests/unit/packaging/test_search.py b/tests/unit/packaging/test_search.py
--- a/tests/unit/packaging/test_search.py
+++ b/tests/unit/packaging/test_search.py
@@ -22,10 +22,11 @@ def test_build_search():
name="Foobar",
normalized_name="foobar",
releases=[
- pretend.stub(version="1.0"),
- pretend.stub(version="2.0"),
- pretend.stub(version="3.0"),
- pretend.stub(version="4.0"),
+ pretend.stub(version="1.0", is_prerelease=False),
+ pretend.stub(version="2.0", is_prerelease=False),
+ pretend.stub(version="3.0", is_prerelease=False),
+ pretend.stub(version="4.0", is_prerelease=False),
+ pretend.stub(version="5.0.dev0", is_prerelease=True),
],
),
summary="This is my summary",
@@ -45,7 +46,8 @@ def test_build_search():
assert obj.meta.id == "foobar"
assert obj["name"] == "Foobar"
- assert obj["version"] == ["1.0", "2.0", "3.0", "4.0"]
+ assert obj["version"] == ["5.0.dev0", "4.0", "3.0", "2.0", "1.0"]
+ assert obj["latest_version"] == "4.0"
assert obj["summary"] == "This is my summary"
assert obj["description"] == "This is my description"
assert obj["author"] == "Jane Author"
diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py
--- a/tests/unit/packaging/test_views.py
+++ b/tests/unit/packaging/test_views.py
@@ -12,6 +12,7 @@
import pretend
+from first import first
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from warehouse.packaging import views
@@ -67,6 +68,41 @@ def test_calls_release_detail(self, monkeypatch, db_request):
assert resp is response
assert release_detail.calls == [pretend.call(release, db_request)]
+ def test_with_prereleases(self, monkeypatch, db_request):
+ project = ProjectFactory.create()
+
+ ReleaseFactory.create(project=project, version="1.0")
+ ReleaseFactory.create(project=project, version="2.0")
+ ReleaseFactory.create(project=project, version="4.0.dev0")
+
+ release = ReleaseFactory.create(project=project, version="3.0")
+
+ response = pretend.stub()
+ release_detail = pretend.call_recorder(lambda ctx, request: response)
+ monkeypatch.setattr(views, "release_detail", release_detail)
+
+ resp = views.project_detail(project, db_request)
+
+ assert resp is response
+ assert release_detail.calls == [pretend.call(release, db_request)]
+
+ def test_only_prereleases(self, monkeypatch, db_request):
+ project = ProjectFactory.create()
+
+ ReleaseFactory.create(project=project, version="1.0.dev0")
+ ReleaseFactory.create(project=project, version="2.0.dev0")
+
+ release = ReleaseFactory.create(project=project, version="3.0.dev0")
+
+ response = pretend.stub()
+ release_detail = pretend.call_recorder(lambda ctx, request: response)
+ monkeypatch.setattr(views, "release_detail", release_detail)
+
+ resp = views.project_detail(project, db_request)
+
+ assert resp is response
+ assert release_detail.calls == [pretend.call(release, db_request)]
+
class TestReleaseDetail:
@@ -100,8 +136,12 @@ def test_detail_renders(self, db_request):
project = ProjectFactory.create()
releases = [
ReleaseFactory.create(project=project, version=v)
- for v in ["1.0", "2.0", "3.0"]
+ for v in ["1.0", "2.0", "3.0", "4.0.dev0"]
]
+ latest_release = first(
+ reversed(releases),
+ key=lambda r: not r.is_prerelease,
+ )
files = [
FileFactory.create(
release=r,
@@ -128,8 +168,14 @@ def test_detail_renders(self, db_request):
"project": project,
"release": releases[1],
"files": [files[1]],
+ "latest_release": (
+ latest_release.version,
+ latest_release.is_prerelease,
+ latest_release.created,
+ ),
"all_releases": [
- (r.version, r.created) for r in reversed(releases)
+ (r.version, r.is_prerelease, r.created)
+ for r in reversed(releases)
],
"maintainers": sorted(users, key=lambda u: u.username.lower()),
"license": None
| Show pip installation instructions to include `--pre` as appropriate for latest release
E.g. https://pypi.org/project/azure/ points to 2.0.0rc6, but the instructions on the page say to do `pip install azure` which won't work due to the lack of `--pre` flag in the command.
| Nice catch. We might want to do more than just add the `--pre` flag to the command if it's a pre-release, specifically for the users who might not be familiar with pre-releases or might not want a pre-release. Perhaps:
* add a warning or label that the command will give you a pre-release?
* explain to the user that they likely don't want a pre-release?
* somehow give an alternate command to install a non-pre-release version?
* display the latest non-pre-release version instead if a version is not specified, and indicate that a pre-release is available? (this is likely not a great idea).
(I'm specifically talking about what you get at a project page such as https://pypi.org/project/azure/. On a page such as https://pypi.org/project/azure/2.0.0rc6/, I think it should just have the correct command with the `--pre` flag.)
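A rough sketch of the last idea above — preferring the latest non-pre-release by default — using `packaging.version` and `first`; the version list here is made up for illustration:
```python
from first import first
from packaging.version import parse as parse_version

versions = ["1.0", "2.0", "3.0.0rc1"]  # made-up example data

# Sort newest-first, then prefer the newest version that is not a pre-release,
# falling back to the newest overall if only pre-releases exist.
ordered = sorted(versions, key=parse_version, reverse=True)
latest = first(
    ordered,
    key=lambda v: not parse_version(v).is_prerelease,
    default=ordered[0],
)

print(latest)  # -> "2.0"
```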
Maybe change the first green badge to orange with "Latest pre-release, click here for latest stable"?
My two cents,
Example for NuGet: https://www.nuget.org/packages/Microsoft.Azure.Management.Compute/
Latest stable has a blue box:
> There is a newer prerelease version of this package available.
> See the version list below for details.
And preview versions have an orange box
> This is a prerelease version of Microsoft.Azure.Management.Compute.
FWIW I had always intended for ``/project/<foo>/`` to show the latest non-pre-release version (to match what would happen if someone just typed ``pip install foo``), as described by the version selection rules in PEP 440. | 2016-12-03T17:44:29Z | [] | [] |
pypi/warehouse | 1,530 | pypi__warehouse-1530 | [
"707"
] | a30929f192847729ebc5a3eaf5735e579db3349f | diff --git a/warehouse/accounts/__init__.py b/warehouse/accounts/__init__.py
--- a/warehouse/accounts/__init__.py
+++ b/warehouse/accounts/__init__.py
@@ -20,6 +20,8 @@
from warehouse.accounts.auth_policy import (
BasicAuthAuthenticationPolicy, SessionAuthenticationPolicy,
)
+from warehouse.rate_limiting import RateLimit, IRateLimiter
+
REDIRECT_FIELD_NAME = 'next'
@@ -71,3 +73,16 @@ def includeme(config):
# Add a request method which will allow people to access the user object.
config.add_request_method(_user, name="user", reify=True)
+
+ # Register the rate limits that we're going to be using for our login
+ # attempts
+ config.register_service_factory(
+ RateLimit("10 per 5 minutes"),
+ IRateLimiter,
+ name="user.login",
+ )
+ config.register_service_factory(
+ RateLimit("1000 per 5 minutes"),
+ IRateLimiter,
+ name="global.login",
+ )
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py
--- a/warehouse/accounts/forms.py
+++ b/warehouse/accounts/forms.py
@@ -16,6 +16,7 @@
import wtforms.fields.html5
from warehouse import forms, recaptcha
+from warehouse.accounts.interfaces import TooManyFailedLogins
class CredentialsMixin:
@@ -119,5 +120,13 @@ def validate_username(self, field):
def validate_password(self, field):
userid = self.user_service.find_userid(self.username.data)
if userid is not None:
- if not self.user_service.check_password(userid, field.data):
- raise wtforms.validators.ValidationError("Invalid password.")
+ try:
+ if not self.user_service.check_password(userid, field.data):
+ raise wtforms.validators.ValidationError(
+ "Invalid password.",
+ )
+ except TooManyFailedLogins:
+ raise wtforms.validators.ValidationError(
+ "There have been too many unsuccessful login attempts, "
+ "please try again later."
+ ) from None
diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py
--- a/warehouse/accounts/interfaces.py
+++ b/warehouse/accounts/interfaces.py
@@ -13,6 +13,14 @@
from zope.interface import Interface
+class TooManyFailedLogins(Exception):
+
+ def __init__(self, *args, resets_in, **kwargs):
+ self.resets_in = resets_in
+
+ return super().__init__(*args, **kwargs)
+
+
class IUserService(Interface):
def get_user(userid):
diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py
--- a/warehouse/accounts/services.py
+++ b/warehouse/accounts/services.py
@@ -10,22 +10,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import collections
import datetime
import functools
+import logging
from passlib.context import CryptContext
from sqlalchemy.orm.exc import NoResultFound
from zope.interface import implementer
-from warehouse.accounts.interfaces import IUserService
+from warehouse.accounts.interfaces import IUserService, TooManyFailedLogins
from warehouse.accounts.models import Email, User
+from warehouse.rate_limiting import IRateLimiter, DummyRateLimiter
+
+
+logger = logging.getLogger(__name__)
@implementer(IUserService)
class DatabaseUserService:
- def __init__(self, session):
+ def __init__(self, session, ratelimiters=None):
+ if ratelimiters is None:
+ ratelimiters = {}
+ ratelimiters = collections.defaultdict(DummyRateLimiter, ratelimiters)
+
self.db = session
+ self.ratelimiters = ratelimiters
self.hasher = CryptContext(
schemes=[
"argon2",
@@ -78,24 +89,49 @@ def find_userid_by_email(self, email):
return user_id
def check_password(self, userid, password):
+ # The very first thing we want to do is check to see if we've hit our
+ # global rate limit or not, assuming that we've been configured with a
+ # global rate limiter anyways.
+ if not self.ratelimiters["global"].test():
+ logger.warning("Global failed login threshold reached.")
+ raise TooManyFailedLogins(
+ resets_in=self.ratelimiters["global"].resets_in(),
+ )
+
user = self.get_user(userid)
- if user is None:
- return False
+ if user is not None:
+ # Now, check to make sure that we haven't hitten a rate limit on a
+ # per user basis.
+ if not self.ratelimiters["user"].test(user.id):
+ raise TooManyFailedLogins(
+ resets_in=self.ratelimiters["user"].resets_in(user.id),
+ )
+
+ # Actually check our hash, optionally getting a new hash for it if
+ # we should upgrade our saved hashed.
+ ok, new_hash = self.hasher.verify_and_update(
+ password,
+ user.password,
+ )
- # Actually check our hash, optionally getting a new hash for it if
- # we should upgrade our saved hashed.
- ok, new_hash = self.hasher.verify_and_update(password, user.password)
+ # First, check to see if the password that we were given was OK.
+ if ok:
+ # Then, if the password was OK check to see if we've been given
+ # a new password hash from the hasher, if so we'll want to save
+ # that hash.
+ if new_hash:
+ user.password = new_hash
- # Check if the password itself was OK or not.
- if not ok:
- return False
+ return True
- # If we've gotten a new password hash from the hasher, then we'll want
- # to save that hash.
- if new_hash:
- user.password = new_hash
+ # If we've gotten here, then we'll want to record a failed login in our
+ # rate limiting before returning False to indicate a failed password
+ # verification.
+ if user is not None:
+ self.ratelimiters["user"].hit(user.id)
+ self.ratelimiters["global"].hit()
- return True
+ return False
def create_user(self, username, name, password, email,
is_active=False, is_staff=False, is_superuser=False):
@@ -128,4 +164,18 @@ def verify_email(self, user_id, email_address):
def database_login_factory(context, request):
- return DatabaseUserService(request.db)
+ return DatabaseUserService(
+ request.db,
+ ratelimiters={
+ "global": request.find_service(
+ IRateLimiter,
+ name="global.login",
+ context=None,
+ ),
+ "user": request.find_service(
+ IRateLimiter,
+ name="user.login",
+ context=None,
+ ),
+ },
+ )
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py
--- a/warehouse/accounts/views.py
+++ b/warehouse/accounts/views.py
@@ -13,14 +13,16 @@
import datetime
from pyblake2 import blake2b
-from pyramid.httpexceptions import HTTPMovedPermanently, HTTPSeeOther
+from pyramid.httpexceptions import (
+ HTTPMovedPermanently, HTTPSeeOther, HTTPTooManyRequests,
+)
from pyramid.security import remember, forget
from pyramid.view import view_config
from sqlalchemy.orm import joinedload
from warehouse.accounts import REDIRECT_FIELD_NAME
from warehouse.accounts import forms
-from warehouse.accounts.interfaces import IUserService
+from warehouse.accounts.interfaces import IUserService, TooManyFailedLogins
from warehouse.cache.origin import origin_cache
from warehouse.packaging.models import Project, Release
from warehouse.utils.http import is_safe_url
@@ -29,6 +31,25 @@
USER_ID_INSECURE_COOKIE = "user_id__insecure"
+@view_config(context=TooManyFailedLogins)
+def failed_logins(exc, request):
+ resp = HTTPTooManyRequests(
+ "There have been too many unsuccessful login attempts. Please try "
+ "again later.",
+ retry_after=exc.resets_in.total_seconds(),
+ )
+
+ # TODO: This is kind of gross, but we need it for as long as the legacy
+ # upload API exists and is supported. Once we get rid of that we can
+ # get rid of this as well.
+ resp.status = "{} {}".format(
+ resp.status_code,
+ "Too Many Failed Login Attempts",
+ )
+
+ return resp
+
+
@view_config(
route_name="accounts.profile",
renderer="accounts/profile.html",
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -136,6 +136,7 @@ def configure(settings=None):
maybe_set(settings, "sentry.transport", "SENTRY_TRANSPORT")
maybe_set(settings, "sessions.url", "REDIS_URL")
maybe_set(settings, "download_stats.url", "REDIS_URL")
+ maybe_set(settings, "ratelimit.url", "REDIS_URL")
maybe_set(settings, "recaptcha.site_key", "RECAPTCHA_SITE_KEY")
maybe_set(settings, "recaptcha.secret_key", "RECAPTCHA_SECRET_KEY")
maybe_set(settings, "sessions.secret", "SESSION_SECRET")
@@ -297,6 +298,9 @@ def configure(settings=None):
# Register the configuration for the PostgreSQL database.
config.include(".db")
+ # Register support for our rate limiting mechanisms
+ config.include(".rate_limiting")
+
config.include(".search")
# Register the support for AWS
diff --git a/warehouse/rate_limiting/__init__.py b/warehouse/rate_limiting/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/rate_limiting/__init__.py
@@ -0,0 +1,112 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from datetime import datetime, timezone
+
+from first import first
+from limits import parse_many
+from limits.strategies import MovingWindowRateLimiter
+from limits.storage import storage_from_string
+from zope.interface import implementer
+
+from warehouse.rate_limiting.interfaces import IRateLimiter
+
+
+@implementer(IRateLimiter)
+class RateLimiter:
+
+ def __init__(self, storage, limit, identifiers=None):
+ if identifiers is None:
+ identifiers = []
+
+ self._window = MovingWindowRateLimiter(storage)
+ self._limits = parse_many(limit)
+ self._identifiers = identifiers
+
+ def _get_identifiers(self, identifiers):
+ return [str(i) for i in list(self._identifiers) + list(identifiers)]
+
+ def test(self, *identifiers):
+ return all([
+ self._window.test(limit, *self._get_identifiers(identifiers))
+ for limit in self._limits
+ ])
+
+ def hit(self, *identifiers):
+ return all([
+ self._window.hit(limit, *self._get_identifiers(identifiers))
+ for limit in self._limits
+ ])
+
+ def resets_in(self, *identifiers):
+ resets = []
+ for limit in self._limits:
+ resets_at, remaining = self._window.get_window_stats(
+ limit,
+ *self._get_identifiers(identifiers),
+ )
+
+ # If this limit has any remaining limits left, then we will skip it
+ # since it doesn't need reset.
+ if remaining > 0:
+ continue
+
+ current = datetime.now(tz=timezone.utc)
+ reset = datetime.fromtimestamp(resets_at, tz=timezone.utc)
+
+ # If our current datetime is either greater than or equal to when
+ # the limit resets, then we will skipp it since it has either
+ # already reset, or it is resetting now.
+ if current >= reset:
+ continue
+
+ # Add a timedelta that represents how long until this limit resets.
+ resets.append(reset - current)
+
+ # If we have any resets, then we'll go through and find whichever one
+ # is going to reset soonest and use that as our hint for when this
+ # limit might be available again.
+ return first(sorted(resets))
+
+
+@implementer(IRateLimiter)
+class DummyRateLimiter:
+
+ def test(self, *identifiers):
+ return True
+
+ def hit(self, *identifiers):
+ return True
+
+ def resets_in(self, *identifiers):
+ return None
+
+
+class RateLimit:
+
+ def __init__(self, limit, identifiers=None, limiter_class=RateLimiter):
+ self.limit = limit
+ self.identifiers = identifiers
+ self.limiter_class = limiter_class
+
+ def __call__(self, context, request):
+ return self.limiter_class(
+ request.registry["ratelimiter.storage"],
+ limit=self.limit,
+ identifiers=self.identifiers,
+ )
+
+
+def includeme(config):
+ config.registry["ratelimiter.storage"] = storage_from_string(
+ config.registry.settings["ratelimit.url"],
+ )
diff --git a/warehouse/rate_limiting/interfaces.py b/warehouse/rate_limiting/interfaces.py
new file mode 100644
--- /dev/null
+++ b/warehouse/rate_limiting/interfaces.py
@@ -0,0 +1,36 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from zope.interface import Interface
+
+
+class IRateLimiter(Interface):
+
+ def test(*identifiers):
+ """
+ Checks if the rate limit identified by the identifiers has been
+ reached, returning a boolean to indicate whether or not to allow the
+ action.
+ """
+
+ def hit(*identifiers):
+ """
+ Registers a hit for the rate limit identified by the identifiers. This
+ will return a boolean to indicate whether or not to allow the action
+ for which a hit has been registered.
+ """
+
+ def resets_in(*identifiers):
+ """
+ Returns a timedelta indicating how long until the rate limit identified
+ by identifiers will reset.
+ """
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -136,6 +136,7 @@ def app_config(database):
"database.url": database,
"docs.url": "http://docs.example.com/",
"download_stats.url": "redis://localhost:0/",
+ "ratelimit.url": "memory://",
"elasticsearch.url": "https://localhost/warehouse",
"files.backend": "warehouse.packaging.services.LocalFileStorage",
"files.url": "http://localhost:7000/",
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py
--- a/tests/unit/accounts/test_core.py
+++ b/tests/unit/accounts/test_core.py
@@ -154,7 +154,7 @@ def test_includeme(monkeypatch):
config = pretend.stub(
register_service_factory=pretend.call_recorder(
- lambda factory, iface: None
+ lambda factory, iface, name=None: None
),
add_request_method=pretend.call_recorder(lambda f, name, reify: None),
set_authentication_policy=pretend.call_recorder(lambda p: None),
diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py
--- a/tests/unit/accounts/test_forms.py
+++ b/tests/unit/accounts/test_forms.py
@@ -15,6 +15,7 @@
import wtforms
from warehouse.accounts import forms
+from warehouse.accounts.interfaces import TooManyFailedLogins
from warehouse import recaptcha
@@ -100,6 +101,27 @@ def test_validate_password_notok(self, db_session):
assert user_service.find_userid.calls == [pretend.call("my_username")]
assert user_service.check_password.calls == [pretend.call(1, "pw")]
+ def test_validate_password_too_many_failed(self):
+ @pretend.call_recorder
+ def check_password(userid, password):
+ raise TooManyFailedLogins(resets_in=None)
+
+ user_service = pretend.stub(
+ find_userid=pretend.call_recorder(lambda userid: 1),
+ check_password=check_password,
+ )
+ form = forms.LoginForm(
+ data={"username": "my_username"},
+ user_service=user_service,
+ )
+ field = pretend.stub(data="pw")
+
+ with pytest.raises(wtforms.validators.ValidationError):
+ form.validate_password(field)
+
+ assert user_service.find_userid.calls == [pretend.call("my_username")]
+ assert user_service.check_password.calls == [pretend.call(1, "pw")]
+
class TestRegistrationForm:
def test_create(self):
diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py
--- a/tests/unit/accounts/test_services.py
+++ b/tests/unit/accounts/test_services.py
@@ -13,11 +13,13 @@
import uuid
import pretend
+import pytest
from zope.interface.verify import verifyClass
from warehouse.accounts import services
-from warehouse.accounts.interfaces import IUserService
+from warehouse.accounts.interfaces import IUserService, TooManyFailedLogins
+from warehouse.rate_limiting.interfaces import IRateLimiter
from ...common.db.accounts import UserFactory, EmailFactory
@@ -65,10 +67,42 @@ def test_find_userid_existing_user(self, db_session):
service = services.DatabaseUserService(db_session)
assert service.find_userid(user.username) == user.id
+ def test_check_password_global_rate_limited(self):
+ resets = pretend.stub()
+ limiter = pretend.stub(test=lambda: False, resets_in=lambda: resets)
+ service = services.DatabaseUserService(
+ pretend.stub(),
+ ratelimiters={"global": limiter},
+ )
+
+ with pytest.raises(TooManyFailedLogins) as excinfo:
+ service.check_password(uuid.uuid4(), None)
+
+ assert excinfo.value.resets_in is resets
+
def test_check_password_nonexistant_user(self, db_session):
service = services.DatabaseUserService(db_session)
assert not service.check_password(uuid.uuid4(), None)
+ def test_check_password_user_rate_limited(self, db_session):
+ user = UserFactory.create()
+ resets = pretend.stub()
+ limiter = pretend.stub(
+ test=pretend.call_recorder(lambda uid: False),
+ resets_in=pretend.call_recorder(lambda uid: resets),
+ )
+ service = services.DatabaseUserService(
+ db_session,
+ ratelimiters={"user": limiter},
+ )
+
+ with pytest.raises(TooManyFailedLogins) as excinfo:
+ service.check_password(user.id, None)
+
+ assert excinfo.value.resets_in is resets
+ assert limiter.test.calls == [pretend.call(user.id)]
+ assert limiter.resets_in.calls == [pretend.call(user.id)]
+
def test_check_password_invalid(self, db_session):
user = UserFactory.create()
service = services.DatabaseUserService(db_session)
@@ -178,11 +212,37 @@ def test_create_login_error(self, db_session):
def test_database_login_factory(monkeypatch):
service_obj = pretend.stub()
- service_cls = pretend.call_recorder(lambda session: service_obj)
+ service_cls = pretend.call_recorder(
+ lambda session, ratelimiters: service_obj,
+ )
monkeypatch.setattr(services, "DatabaseUserService", service_cls)
+ global_ratelimiter = pretend.stub()
+ user_ratelimiter = pretend.stub()
+
+ def find_service(iface, name, context):
+ assert iface is IRateLimiter
+ assert context is None
+ assert name in {"global.login", "user.login"}
+
+ return ({
+ "global.login": global_ratelimiter,
+ "user.login": user_ratelimiter
+ }).get(name)
+
context = pretend.stub()
- request = pretend.stub(db=pretend.stub())
+ request = pretend.stub(
+ db=pretend.stub(),
+ find_service=find_service,
+ )
assert services.database_login_factory(context, request) is service_obj
- assert service_cls.calls == [pretend.call(request.db)]
+ assert service_cls.calls == [
+ pretend.call(
+ request.db,
+ ratelimiters={
+ "global": global_ratelimiter,
+ "user": user_ratelimiter,
+ },
+ ),
+ ]
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py
--- a/tests/unit/accounts/test_views.py
+++ b/tests/unit/accounts/test_views.py
@@ -20,11 +20,25 @@
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPSeeOther
from warehouse.accounts import views
-from warehouse.accounts.interfaces import IUserService
+from warehouse.accounts.interfaces import IUserService, TooManyFailedLogins
from ...common.db.accounts import UserFactory
+class TestFailedLoginView:
+ exc = TooManyFailedLogins(resets_in=datetime.timedelta(seconds=600))
+ request = pretend.stub()
+
+ resp = views.failed_logins(exc, request)
+
+ assert resp.status == "429 Too Many Failed Login Attempts"
+ assert resp.detail == (
+ "There have been too many unsuccessful login attempts. Please try "
+ "again later."
+ )
+ assert dict(resp.headers).get("Retry-After") == "600"
+
+
class TestUserProfile:
def test_user_redirects_username(self, db_request):
diff --git a/tests/unit/rate_limiting/__init__.py b/tests/unit/rate_limiting/__init__.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/rate_limiting/__init__.py
@@ -0,0 +1,11 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/rate_limiting/test_core.py b/tests/unit/rate_limiting/test_core.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/rate_limiting/test_core.py
@@ -0,0 +1,137 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+import pretend
+
+from limits import storage
+
+from warehouse import rate_limiting
+from warehouse.rate_limiting import RateLimiter, DummyRateLimiter, RateLimit
+
+
+class TestRateLimiter:
+
+ def test_basic(self):
+ limiter = RateLimiter(
+ storage.MemoryStorage(),
+ "1 per minute",
+ identifiers=["foo"],
+ )
+
+ assert limiter.test("foo")
+ assert limiter.test("bar")
+
+ while limiter.hit("bar"):
+ pass
+
+ assert limiter.test("foo")
+ assert not limiter.test("bar")
+
+ def test_namespacing(self):
+ storage_ = storage.MemoryStorage()
+ limiter1 = RateLimiter(storage_, "1 per minute", identifiers=["foo"])
+ limiter2 = RateLimiter(storage_, "1 per minute")
+
+ assert limiter1.test("bar")
+ assert limiter2.test("bar")
+
+ while limiter1.hit("bar"):
+ pass
+
+ assert limiter2.test("bar")
+ assert not limiter1.test("bar")
+
+ def test_results_in(self):
+ limiter = RateLimiter(storage.MemoryStorage(), "1 per minute")
+
+ assert limiter.resets_in("foo") is None
+
+ while limiter.hit("foo"):
+ pass
+
+ assert limiter.resets_in("foo") > datetime.timedelta(seconds=0)
+ assert limiter.resets_in("foo") < datetime.timedelta(seconds=60)
+
+ def test_results_in_expired(self):
+ limiter = RateLimiter(
+ storage.MemoryStorage(),
+ "1 per minute; 1 per hour; 1 per day",
+ )
+
+ current = datetime.datetime.now(tz=datetime.timezone.utc)
+ stats = iter([
+ (0, 0),
+ ((current + datetime.timedelta(seconds=60)).timestamp(), 0),
+ ((current + datetime.timedelta(seconds=5)).timestamp(), 0),
+ ])
+
+ limiter._window = pretend.stub(
+ get_window_stats=lambda l, *a: next(stats),
+ )
+
+ resets_in = limiter.resets_in("foo")
+
+ assert resets_in > datetime.timedelta(seconds=0)
+ assert resets_in <= datetime.timedelta(seconds=5)
+
+
+class TestDummyRateLimiter:
+
+ def test_basic(self):
+ limiter = DummyRateLimiter()
+
+ assert limiter.test()
+ assert limiter.hit()
+ assert limiter.resets_in() is None
+
+
+class TestRateLimit:
+
+ def test_basic(self):
+ limiter_obj = pretend.stub()
+ limiter_class = pretend.call_recorder(lambda *a, **kw: limiter_obj)
+
+ context = pretend.stub()
+ request = pretend.stub(
+ registry={"ratelimiter.storage": pretend.stub()},
+ )
+
+ result = RateLimit(
+ "1 per 5 minutes",
+ identifiers=["foo"],
+ limiter_class=limiter_class,
+ )(context, request)
+
+ assert result is limiter_obj
+ assert limiter_class.calls == [
+ pretend.call(
+ request.registry["ratelimiter.storage"],
+ limit="1 per 5 minutes",
+ identifiers=["foo"],
+ ),
+ ]
+
+
+def test_includeme():
+ registry = {}
+ config = pretend.stub(
+ registry=pretend.stub(
+ settings={"ratelimit.url": "memory://"},
+ __setitem__=registry.__setitem__,
+ ),
+ )
+
+ rate_limiting.includeme(config)
+
+ assert isinstance(registry["ratelimiter.storage"], storage.MemoryStorage)
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -320,6 +320,7 @@ def __init__(self):
pretend.call(".domain"),
pretend.call(".i18n"),
pretend.call(".db"),
+ pretend.call(".rate_limiting"),
pretend.call(".search"),
pretend.call(".aws"),
pretend.call(".celery"),
| Rate limit login attacks
We need to enable rate limiting on login attempts so people cannot easily brute-force them.
| I'm not a Warehouse contributor (yet), but below are some related things.
- Here is Pyramid code I use in another project: https://gist.github.com/miohtama/86c86b4caa61a5615e44
- Background story: http://opensourcehacker.com/2014/07/09/rolling-time-window-counters-with-redis-and-mitigating-botnet-driven-login-attacks/
- More about the topic: http://operationssecurity.org/en/latest/user/index.html#brute-force-login-prevention
I'm going to be taking a stab at authentication/registration, and this obviously falls in that ballpark. I'm thinking that rate limiting isn't something that we want to do (unless there's a technique that I'm missing).
If the account in question is used as the key by which we're rate limiting, it's easily possible to effectively DoS a legitimate user. Even if an account lockout can be reset by a password reset via email, it's still a painful process for the user.
If we use the source address as the key, then we're potentially DoSing all users behind a proxy.
Rather than rate limit, I'm wondering if the form should use either a captcha or a secret question to which an answer is given during registration (although that would pose a problem for account migration) when attempts >= N (3 seems pretty common).
In addition to the captcha/secret question, a delay could be added to the login flow, which would drastically increase the time required for automated attacks.
Thoughts?
There are three ways of rate limiting for login, sign-up and password reset. The rate limiting effect can be a CAPTCHA, a cooldown, or both.
- First, if one wants to keep the system simple, just require a CAPTCHA on every attempt. This is a nuisance for the users, but saves a lot of development time thinking through complex attack scenarios. Use a strong third-party CAPTCHA with machine intelligence and big data behind it. Usually this is reCAPTCHA.
- Limit by IP address. This is pretty much useless, as in the future IPv6 gives attackers basically endless addresses, and you need a fancy way to scale address matching up to the network block: http://serverfault.com/questions/631160/banning-ipv6-addresses
- Limit by username: this prevents targeted attacks against individual users. Show a CAPTCHA after N attempts and force a cooldown after M attempts. This is the scenario where a delay can help. However, in the contemporary world of cyber threats this is rarely the most common attack method.
- Global attempt threshold: require a CAPTCHA for all attempts (even the first login/password) if the system-wide attempt count exceeds normal values (which one obtains from active system monitoring). In this case the system is most likely under a botnet attack, and attackers are going through email (username)/password combo lists bought on black markets while having thousands of IPs at their disposal. The attackers just cycle to the next email (username)/password/IP combo for every attempt, so cooling down cannot help here. This is what bad guys tend to do against high-value targets, which I would consider PyPI to be. It's likely millions of different username combos from thousands of IPs.
To count attempts one can use rolling time window counters. Here is one example I made for Redis:
- https://opensourcehacker.com/2014/07/09/rolling-time-window-counters-with-redis-and-mitigating-botnet-driven-login-attacks/
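For context, a bare-bones rolling time-window counter along those lines, sketched here with redis-py sorted sets rather than taken from the linked gist (assumes redis-py >= 3.0 and a reachable Redis; the key names and threshold are illustrative only):
```python
import time
import uuid

import redis  # assumes a local Redis and redis-py >= 3.0

r = redis.Redis()

def record_attempt_and_count(key, window_seconds=300):
    """Record one login attempt and return how many fell inside the window."""
    now = time.time()
    member = f"{now}:{uuid.uuid4()}"                      # unique member per attempt
    pipe = r.pipeline()
    pipe.zadd(key, {member: now})                         # score = timestamp
    pipe.zremrangebyscore(key, 0, now - window_seconds)   # drop attempts outside the window
    pipe.zcard(key)                                       # count what is left
    pipe.expire(key, window_seconds)                      # let idle keys disappear
    return pipe.execute()[2]

# e.g. a per-user counter, as in the strategies listed above
if record_attempt_and_count("login:failed:user:42") > 10:  # illustrative threshold
    print("require CAPTCHA / cool-down for this user")
```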
> Global attempt threshold
I hadn't actually considered that attack vector, thanks for pointing it out. In light of that, I'm +1 to forging ahead down the Recaptcha path on every login attempt. This seems like the option with the least friction, while minimizing the potential for effectively creating DoS attacks on legitimate users. Minimal development effort while (seemingly) providing the highest level of security against brute-force attacks. Sounds like an easy win to me.
Any thoughts from others?
| 2016-12-06T23:43:08Z | [] | [] |
pypi/warehouse | 1,598 | pypi__warehouse-1598 | [
"1481"
] | 1b3d4720902550b32b531bbeedadd52032eeafd4 | diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -217,7 +217,22 @@ def search(request):
query = request.es.query()
if request.params.get("o"):
- query = query.sort(request.params["o"])
+ sort_key = request.params["o"]
+ if sort_key.startswith("-"):
+ sort = {
+ sort_key[1:]: {
+ "order": "desc",
+ "unmapped_type": "long",
+ },
+ }
+ else:
+ sort = {
+ sort_key: {
+ "unmapped_type": "long",
+ }
+ }
+
+ query = query.sort(sort)
if request.params.getall("c"):
query = query.filter("terms", classifiers=request.params.getall("c"))
| diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -220,11 +220,25 @@ def test_with_a_single_char_query(self, monkeypatch, db_request, page):
),
]
- @pytest.mark.parametrize("page", [None, 1, 5])
- def test_with_an_ordering(self, monkeypatch, db_request, page):
- params = MultiDict({"q": "foo bar", "o": "-created"})
+ @pytest.mark.parametrize(
+ ("page", "order", "expected"),
+ [
+ (None, None, []),
+ (
+ 1,
+ "-created",
+ [{"created": {"order": "desc", "unmapped_type": "long"}}],
+ ),
+ (5, "created", [{"created": {"unmapped_type": "long"}}]),
+ ],
+ )
+ def test_with_an_ordering(self, monkeypatch, db_request, page, order,
+ expected):
+ params = MultiDict({"q": "foo bar"})
if page is not None:
params["page"] = page
+ if order is not None:
+ params["o"] = order
db_request.params = params
sort = pretend.stub()
@@ -254,7 +268,11 @@ def test_with_an_ordering(self, monkeypatch, db_request, page):
"available_filters": [],
}
assert page_cls.calls == [
- pretend.call(sort, url_maker=url_maker, page=page or 1),
+ pretend.call(
+ sort if order is not None else suggest,
+ url_maker=url_maker,
+ page=page or 1,
+ ),
]
assert url_maker_factory.calls == [pretend.call(db_request)]
assert db_request.es.query.calls == [
@@ -270,9 +288,7 @@ def test_with_an_ordering(self, monkeypatch, db_request, page):
term={"field": "name"},
),
]
- assert suggest.sort.calls == [
- pretend.call("-created")
- ]
+ assert suggest.sort.calls == [pretend.call(i) for i in expected]
@pytest.mark.parametrize("page", [None, 1, 5])
def test_with_classifiers(self, monkeypatch, db_request, page):
Invalid ordering param causes search_phase_execution_exception
It's possible to generate a 500-level exception by passing an invalid `o` parameter (for ordering) to the search route, e.g. https://pypi.org/search/?q=&o=foobar.
We should check if the parameter is a valid field before trying to sort on it, [or possibly just ignore it](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-sort.html#_ignoring_unmapped_fields).
Here are the relevant lines in `warehouse/views.py`: https://github.com/pypa/warehouse/blob/436d52d1dcebc593846d04559f38a5de980cb0c1/warehouse/views.py#L219-L220
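A small sketch of the "possibly just ignore it" option, written with elasticsearch-dsl directly rather than Warehouse's `request.es` wrapper: passing `unmapped_type` in the sort clause keeps an unknown field (here the made-up `foobar`) from raising `search_phase_execution_exception`:
```python
from elasticsearch_dsl import Search

# Build a query that sorts descending on a possibly non-existent field.
# With unmapped_type set, Elasticsearch treats a missing mapping as an
# (empty) long field instead of failing the search phase.
query = Search().query("match", name="pyramid")
query = query.sort({"foobar": {"order": "desc", "unmapped_type": "long"}})
# Executing the query would require a configured Elasticsearch connection.
```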
| 2016-12-18T01:20:29Z | [] | [] |
|
pypi/warehouse | 1,623 | pypi__warehouse-1623 | [
"1599"
] | 0ca21fe1e038313f59c7873cf3f0da0a1922cde3 | diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -42,7 +42,9 @@ def json_project(project, request):
release = (
request.db.query(Release)
.filter(Release.project == project)
- .order_by(Release._pypi_ordering.desc())
+ .order_by(
+ Release.is_prerelease.nullslast(),
+ Release._pypi_ordering.desc())
.limit(1)
.one()
)
| diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py
--- a/tests/unit/legacy/api/test_json.py
+++ b/tests/unit/legacy/api/test_json.py
@@ -66,6 +66,41 @@ def test_calls_release_detail(self, monkeypatch, db_request):
assert resp is response
assert json_release.calls == [pretend.call(release, db_request)]
+ def test_with_prereleases(self, monkeypatch, db_request):
+ project = ProjectFactory.create()
+
+ ReleaseFactory.create(project=project, version="1.0")
+ ReleaseFactory.create(project=project, version="2.0")
+ ReleaseFactory.create(project=project, version="4.0.dev0")
+
+ release = ReleaseFactory.create(project=project, version="3.0")
+
+ response = pretend.stub()
+ json_release = pretend.call_recorder(lambda ctx, request: response)
+ monkeypatch.setattr(json, "json_release", json_release)
+
+ resp = json.json_project(project, db_request)
+
+ assert resp is response
+ assert json_release.calls == [pretend.call(release, db_request)]
+
+ def test_only_prereleases(self, monkeypatch, db_request):
+ project = ProjectFactory.create()
+
+ ReleaseFactory.create(project=project, version="1.0.dev0")
+ ReleaseFactory.create(project=project, version="2.0.dev0")
+
+ release = ReleaseFactory.create(project=project, version="3.0.dev0")
+
+ response = pretend.stub()
+ json_release = pretend.call_recorder(lambda ctx, request: response)
+ monkeypatch.setattr(json, "json_release", json_release)
+
+ resp = json.json_project(project, db_request)
+
+ assert resp is response
+ assert json_release.calls == [pretend.call(release, db_request)]
+
class TestJSONRelease:
| json urls object may contain prereleases
I think the urls object in the JSON output should reference downloads for the latest stable release; even after #1519, the urls object may contain references to prerelease versions. For example, https://pypi.org/pypi/wheel/json has
```json
{
"urls": [
{
"filename": "wheel-0.30.0a0-py2.py3-none-any.whl",
"url": "https://files.pythonhosted.org/packages/83/53/e120833aa2350db333df89a40dea3b310dd9dabf6f29eaa18934a597dc79/wheel-0.30.0a0-py2.py3-none-any.whl"
}, ...
```
I think it should point to 0.29.0 instead, like the project page does.
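Until the API changes, a consumer can guard against this on its own side; a rough sketch assuming the standard JSON API layout (the `requests` usage and the project name are just for illustration):
```python
import requests
from packaging.version import parse as parse_version

data = requests.get("https://pypi.org/pypi/wheel/json").json()

# Pick the newest release that is not a pre-release; fall back to whatever
# the API reported if the project only has pre-releases.
stable = sorted(
    (v for v in data["releases"] if not parse_version(v).is_prerelease),
    key=parse_version,
)
version = stable[-1] if stable else data["info"]["version"]
files = data["releases"][version]  # download entries for that version
print(version, len(files))
```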
| 2016-12-28T20:08:33Z | [] | [] |
|
pypi/warehouse | 1,706 | pypi__warehouse-1706 | [
"1699"
] | af40bc837333bfa142fd9a6a5a9ed2c81c73df52 | diff --git a/warehouse/sessions.py b/warehouse/sessions.py
--- a/warehouse/sessions.py
+++ b/warehouse/sessions.py
@@ -272,16 +272,13 @@ def session_view(view, info):
# with a small wrapper around it to ensure that it has a Vary: Cookie
# header.
return add_vary("Cookie")(view)
+ elif info.exception_only:
+ return view
else:
# If we're not using the session on this view, then we'll wrap the view
# with a wrapper that just ensures that the session cannot be used.
@functools.wraps(view)
def wrapped(context, request):
- # TODO: When Pyramid 1.8 is released we can make this better by
- # using info.exception_only.
- if request.exception is not None:
- return view(context, request)
-
# Save the original session so that we can restore it once the
# inner views have been called.
original_session = request.session
| diff --git a/tests/unit/accounts/test_auth_policy.py b/tests/unit/accounts/test_auth_policy.py
--- a/tests/unit/accounts/test_auth_policy.py
+++ b/tests/unit/accounts/test_auth_policy.py
@@ -12,6 +12,7 @@
import pretend
+from pyramid import authentication
from pyramid.interfaces import IAuthenticationPolicy
from zope.interface.verify import verifyClass
@@ -28,10 +29,17 @@ def test_verify(self):
)
def test_unauthenticated_userid_no_userid(self, monkeypatch):
+ extract_http_basic_credentials = \
+ pretend.call_recorder(lambda request: None)
+ monkeypatch.setattr(
+ authentication,
+ "extract_http_basic_credentials",
+ extract_http_basic_credentials,
+ )
+
policy = auth_policy.BasicAuthAuthenticationPolicy(
check=pretend.stub(),
)
- policy._get_credentials = pretend.call_recorder(lambda request: None)
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
@@ -42,17 +50,22 @@ def test_unauthenticated_userid_no_userid(self, monkeypatch):
)
assert policy.unauthenticated_userid(request) is None
- assert policy._get_credentials.calls == [pretend.call(request)]
+ assert extract_http_basic_credentials.calls == [pretend.call(request)]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_unauthenticated_userid_with_userid(self, monkeypatch):
+ extract_http_basic_credentials = \
+ pretend.call_recorder(lambda request: ("username", "password"))
+ monkeypatch.setattr(
+ authentication,
+ "extract_http_basic_credentials",
+ extract_http_basic_credentials,
+ )
+
policy = auth_policy.BasicAuthAuthenticationPolicy(
check=pretend.stub(),
)
- policy._get_credentials = pretend.call_recorder(
- lambda request: ("username", "password")
- )
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
@@ -68,6 +81,7 @@ def test_unauthenticated_userid_with_userid(self, monkeypatch):
)
assert policy.unauthenticated_userid(request) is userid
+ assert extract_http_basic_credentials.calls == [pretend.call(request)]
assert request.find_service.calls == [
pretend.call(IUserService, context=None),
]
diff --git a/tests/unit/test_sessions.py b/tests/unit/test_sessions.py
--- a/tests/unit/test_sessions.py
+++ b/tests/unit/test_sessions.py
@@ -544,7 +544,7 @@ def test_has_options(self):
@pytest.mark.parametrize("uses_session", [False, None])
def test_invalid_session(self, uses_session):
context = pretend.stub()
- request = pretend.stub(session=pretend.stub(), exception=None)
+ request = pretend.stub(session=pretend.stub())
response = pretend.stub()
@pretend.call_recorder
@@ -552,7 +552,7 @@ def view(context, request):
assert isinstance(request.session, InvalidSession)
return response
- info = pretend.stub(options={})
+ info = pretend.stub(options={}, exception_only=False)
if uses_session is not None:
info.options["uses_session"] = uses_session
derived_view = session_view(view, info)
| Update pyramid to 1.8.1
There's a new version of [pyramid](https://pypi.python.org/pypi/pyramid) available.
You are currently using **1.7.3**. I have updated it to **1.8.1**
These links might come in handy: <a href="http://pypi.python.org/pypi/pyramid">PyPI</a> | <a href="https://pyup.io/changelogs/pyramid/">Changelog</a> | <a href="https://trypyramid.com">Homepage</a>
### Changelog
>
>### 1.8
>================
>- No major changes from 1.8b1.
>### 1.8b1
>==================
>Features
>--------
>- Added an ``override`` option to ``config.add_translation_dirs`` to allow
> later calls to place translation directories at a higher priority than
> earlier calls. See https://github.com/Pylons/pyramid/pull/2902
>Documentation Changes
>---------------------
>- Improve registry documentation to discuss uses as a component registry
> and as a dictionary. See https://github.com/Pylons/pyramid/pull/2893
>- Quick Tour, Quick Tutorial, and most other remaining documentation updated to
> use cookiecutters instead of pcreate and scaffolds.
> See https://github.com/Pylons/pyramid/pull/2888 and
> https://github.com/Pylons/pyramid/pull/2889
>- Fix unittests in wiki2 to work without different dependencies between
> py2 and py3. See https://github.com/Pylons/pyramid/pull/2899
>- Update Windows documentation to track newer Python 3 improvements to the
> installer. See https://github.com/Pylons/pyramid/pull/2900
>- Updated the ``mod_wsgi`` tutorial to use cookiecutters and Apache 2.4+.
> See https://github.com/Pylons/pyramid/pull/2901
>### 1.8a1
>==================
>Backward Incompatibilities
>--------------------------
>- Support for the ``IContextURL`` interface that was deprecated in Pyramid 1.3
> has been removed. See https://github.com/Pylons/pyramid/pull/2822
>- Following the Pyramid deprecation period (1.6 -> 1.8),
> daemon support for pserve has been removed. This includes removing the
> daemon commands (start, stop, restart, status) as well as the following
> arguments: ``--daemon``, ``--pid-file``, ``--log-file``,
> ``--monitor-restart``, ``--status``, ``--user``, ``--group``,
> ``--stop-daemon``
> To run your server as a daemon you should use a process manager instead of
> pserve.
> See https://github.com/Pylons/pyramid/pull/2615
>- ``pcreate`` is now interactive by default. You will be prompted if a file
> already exists with different content. Previously if there were similar
> files it would silently skip them unless you specified ``--interactive``
> or ``--overwrite``.
> See https://github.com/Pylons/pyramid/pull/2775
>- Removed undocumented argument ``cachebust_match`` from
> ``pyramid.static.static_view``. This argument was shipped accidentally
> in Pyramid 1.6. See https://github.com/Pylons/pyramid/pull/2681
>- Change static view to avoid setting the ``Content-Encoding`` response header
> to an encoding guessed using Python's ``mimetypes`` module. This was causing
> clients to decode the content of gzipped files when downloading them. The
> client would end up with a ``foo.txt.gz`` file on disk that was already
> decoded, thus should really be ``foo.txt``. Also, the ``Content-Encoding``
> should only have been used if the client itself broadcast support for the
> encoding via ``Accept-Encoding`` request headers.
> See https://github.com/Pylons/pyramid/pull/2810
>- Settings are no longer accessible as attributes on the settings object
> (e.g. ``request.registry.settings.foo``). This was deprecated in Pyramid 1.2.
> See https://github.com/Pylons/pyramid/pull/2823
>Features
>--------
>- Python 3.6 compatibility.
> https://github.com/Pylons/pyramid/issues/2835
>- ``pcreate`` learned about ``--package-name`` to allow you to create a new
> project in an existing folder with a different package name than the project
> name. See https://github.com/Pylons/pyramid/pull/2783
>- The ``_get_credentials`` private method of ``BasicAuthAuthenticationPolicy``
> has been extracted into standalone function ``extract_http_basic_credentials``
> in ``pyramid.authentication`` module, this function extracts HTTP Basic
> credentials from a ``request`` object, and returns them as a named tuple.
> See https://github.com/Pylons/pyramid/pull/2662
>- Pyramid 1.4 silently dropped a feature of the configurator that has been
> restored. It's again possible for action discriminators to conflict across
> different action orders.
> See https://github.com/Pylons/pyramid/pull/2757
>- ``pyramid.paster.bootstrap`` and its sibling ``pyramid.scripting.prepare``
> can now be used as context managers to automatically invoke the ``closer``
> and pop threadlocals off of the stack to prevent memory leaks.
> See https://github.com/Pylons/pyramid/pull/2760
>- Added ``pyramid.config.Configurator.add_exception_view`` and the
> ``pyramid.view.exception_view_config`` decorator. It is now possible using
> these methods or via the new ``exception_only=True`` option to ``add_view``
> to add a view which will only be matched when handling an exception.
> Previously any exception views were also registered for a traversal
> context that inherited from the exception class which prevented any
> exception-only optimizations.
> See https://github.com/Pylons/pyramid/pull/2660
>- Added the ``exception_only`` boolean to
> ``pyramid.interfaces.IViewDeriverInfo`` which can be used by view derivers
> to determine if they are wrapping a view which only handles exceptions.
> This means that it is no longer necessary to perform request-time checks
> for ``request.exception`` to determine if the view is handling an exception
> - the pipeline can be optimized at config-time.
> See https://github.com/Pylons/pyramid/pull/2660
>- ``pserve`` should now work with ``gevent`` and other workers that need
> to monkeypatch the process, assuming the server and / or the app do so
> as soon as possible before importing the rest of pyramid.
> See https://github.com/Pylons/pyramid/pull/2797
>- Pyramid no longer copies the settings object passed to the
> ``pyramid.config.Configurator(settings=)``. The original ``dict`` is kept.
> See https://github.com/Pylons/pyramid/pull/2823
>- The csrf trusted origins setting may now be a whitespace-separated list of
> domains. Previously only a python list was allowed. Also, it can now be set
> using the ``PYRAMID_CSRF_TRUSTED_ORIGINS`` environment variable similar to
> other settings. See https://github.com/Pylons/pyramid/pull/2823
>- ``pserve --reload`` now uses the
> `hupper <http://docs.pylonsproject.org/projects/hupper/en/latest/>`
> library to monitor file changes. This comes with many improvements:
> - If the `watchdog <http://pythonhosted.org/watchdog/>`_ package is
> installed then monitoring will be done using inotify instead of
> cpu and disk-intensive polling.
> - The monitor is now a separate process that will not crash and starts up
> before any of your code.
> - The monitor will not restart the process after a crash until a file is
> saved.
> - The monitor works on windows.
> - You can now trigger a reload manually from a pyramid view or any other
> code via ``hupper.get_reloader().trigger_reload()``. Kind of neat.
> - You can trigger a reload by issuing a ``SIGHUP`` to the monitor process.
> See https://github.com/Pylons/pyramid/pull/2805
>- A new ``[pserve]`` section is supported in your config files with a
> ``watch_files`` key that can configure ``pserve --reload`` to monitor custom
> file paths. See https://github.com/Pylons/pyramid/pull/2827
>- Allow streaming responses to be made from subclasses of
> ``pyramid.httpexceptions.HTTPException``. Previously the response would
> be unrolled while testing for a body, making it impossible to stream
> a response.
> See https://github.com/Pylons/pyramid/pull/2863
>- Update starter, alchemy and zodb scaffolds to support IPv6 by using the
> new ``listen`` directives in waitress.
> See https://github.com/Pylons/pyramid/pull/2853
>- All p* scripts now use argparse instead of optparse. This improves their
> ``--help`` output as well as enabling nicer documentation of their options.
> See https://github.com/Pylons/pyramid/pull/2864
>- Any deferred configuration action registered via ``config.action`` may now
> depend on threadlocal state, such as asset overrides, being active when
> the action is executed.
> See https://github.com/Pylons/pyramid/pull/2873
>- Asset specifications for directories passed to
> ``config.add_translation_dirs`` now support overriding the entire asset
> specification, including the folder name. Previously only the package name
> was supported and the folder would always need to have the same name.
> See https://github.com/Pylons/pyramid/pull/2873
>- ``config.begin()`` will propagate the current threadlocal request through
> as long as the registry is the same. For example:
> .. code-block:: python
> request = Request.blank(...)
> config.begin(request)  # pushes a request
> config.begin()  # propagates the previous request through unchanged
> assert get_current_request() is request
> See https://github.com/Pylons/pyramid/pull/2873
>Bug Fixes
>---------
>- Fixed bug in ``proutes`` such that it now shows the correct view when a
> class and ``attr`` is involved.
> See: https://github.com/Pylons/pyramid/pull/2687
>- Fix a ``FutureWarning`` in Python 3.5 when using ``re.split`` on the
> ``format`` setting to the ``proutes`` script.
> See https://github.com/Pylons/pyramid/pull/2714
>- Fix a ``RuntimeWarning`` emitted by WebOb when using arbitrary objects
> as the ``userid`` in the ``AuthTktAuthenticationPolicy``. This is now caught
> by the policy and the object is serialized as a base64 string to avoid
> the cryptic warning. Since the userid will be read back as a string on
> subsequent requests a more useful warning is emitted encouraging you to
> use a primitive type instead.
> See https://github.com/Pylons/pyramid/pull/2715
>- Pyramid 1.6 introduced the ability for an action to invoke another action.
> There was a bug in the way that ``config.add_view`` would interact with
> custom view derivers introduced in Pyramid 1.7 because the view's
> discriminator cannot be computed until view derivers and view predicates
> have been created in earlier orders. Invoking an action from another action
> would trigger an unrolling of the pipeline and would compute discriminators
> before they were ready. The new behavior respects the ``order`` of the action
> and ensures the discriminators are not computed until dependent actions
> from previous orders have executed.
> See https://github.com/Pylons/pyramid/pull/2757
>- Fix bug in i18n where the default domain would always use the Germanic plural
> style, even if a different plural function is defined in the relevant
> messages file. See https://github.com/Pylons/pyramid/pull/2859
>- The ``config.override_asset`` method now occurs during
> ``pyramid.config.PHASE1_CONFIG`` such that it is ordered to execute before
> any calls to ``config.add_translation_dirs``.
> See https://github.com/Pylons/pyramid/pull/2873
>Deprecations
>------------
>- The ``pcreate`` script and related scaffolds have been deprecated in favor
> of the popular
> `cookiecutter <https://cookiecutter.readthedocs.io/en/latest/>`_ project.
> All of Pyramid's official scaffolds as well as the tutorials have been
> ported to cookiecutters:
> - `pyramid-cookiecutter-starter
> <https://github.com/Pylons/pyramid-cookiecutter-starter>`_
> - `pyramid-cookiecutter-alchemy
> <https://github.com/Pylons/pyramid-cookiecutter-alchemy>`_
> - `pyramid-cookiecutter-zodb
> <https://github.com/Pylons/pyramid-cookiecutter-zodb>`_
> See https://github.com/Pylons/pyramid/pull/2780
>Documentation Changes
>---------------------
>- Update Typographical Conventions.
> https://github.com/Pylons/pyramid/pull/2838
>- Add `pyramid_nacl_session
> <http://docs.pylonsproject.org/projects/pyramid-nacl-session/en/latest/>`_
> to session factories. See https://github.com/Pylons/pyramid/issues/2791
>- Update ``HACKING.txt`` from stale branch that was never merged to master.
> See https://github.com/Pylons/pyramid/pull/2782
>- Updated Windows installation instructions and related bits.
> See https://github.com/Pylons/pyramid/issues/2661
>- Fix an inconsistency in the documentation between view predicates and
> route predicates and highlight the differences in their APIs.
> See https://github.com/Pylons/pyramid/pull/2764
>- Clarify a possible misuse of the ``headers`` kwarg to subclasses of
> ``pyramid.httpexceptions.HTTPException`` in which more appropriate
> kwargs from the parent class ``pyramid.response.Response`` should be
> used instead. See https://github.com/Pylons/pyramid/pull/2750
>- The SQLAlchemy + URL Dispatch + Jinja2 (``wiki2``) and
> ZODB + Traversal + Chameleon (``wiki``) tutorials have been updated to
> utilize the new cookiecutters and drop support for the ``pcreate``
> scaffolds.
> See https://github.com/Pylons/pyramid/pull/2881 and
> https://github.com/Pylons/pyramid/pull/2883.
>- Improve output of p* script descriptions for help.
> See https://github.com/Pylons/pyramid/pull/2886
>- Quick Tour updated to use cookiecutters instead of pcreate and scaffolds.
> See https://github.com/Pylons/pyramid/pull/2888
*Got merge conflicts? Close this PR and delete the branch. I'll create a new PR for you.*
Happy merging! 🤖
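Of the changes quoted above, the ``config.begin()`` threadlocal propagation is the one most likely to show up in test code. A minimal, illustrative sketch of the 1.8 behaviour described in that bullet (not code from this repository):

```python
from pyramid.config import Configurator
from pyramid.request import Request
from pyramid.threadlocal import get_current_request

config = Configurator()

request = Request.blank("/")
config.begin(request)  # pushes this request onto the threadlocal stack

# With no argument, begin() now re-uses the request already on the stack,
# as long as the registry is the same one.
config.begin()
assert get_current_request() is request

# Pop both frames pushed above.
config.end()
config.end()
```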
| 2017-01-26T22:15:06Z | [] | [] |
|
pypi/warehouse | 1,820 | pypi__warehouse-1820 | [
"1818"
] | 490d8fb03bbb7e2565692b010bcde6d1c1e04a35 | diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -94,7 +94,7 @@ def readme(ctx, value, *, format):
element.setAttribute("src", _camo_url(camo_url, camo_key, src))
tree_walker = html5lib.treewalkers.getTreeWalker("dom")
- html_serializer = html5lib.serializer.htmlserializer.HTMLSerializer()
+ html_serializer = html5lib.serializer.HTMLSerializer()
rendered = "".join(html_serializer.serialize(tree_walker(dom)))
return jinja2.Markup(rendered)
| diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -100,7 +100,7 @@ def test_renders_camo(self, monkeypatch):
result = filters.readme(ctx, "raw thing", format="rst")
assert result == jinja2.Markup(
- "<img src=https://camo.example.net/image.jpg>"
+ '<img src="https://camo.example.net/image.jpg">'
)
assert gen_camo_url.calls == [
pretend.call(
| Update bleach to 2.0.0
There's a new version of [bleach](https://pypi.python.org/pypi/bleach) available.
You are currently using **1.5.0**. I have updated it to **2.0.0**
These links might come in handy: <a href="http://pypi.python.org/pypi/bleach">PyPI</a> | <a href="https://pyup.io/changelogs/bleach/">Changelog</a> | <a href="http://github.com/mozilla/bleach">Repo</a> | <a href="http://pythonhosted.org/bleach/">Docs</a>
### Changelog
>
>### 2.0
>-----------------------------
>**Backwards incompatible changes**
>* Removed support for Python 2.6. (#206)
>* Removed support for Python 3.2. (#224)
>* Bleach no longer supports html5lib < 0.99999999 (8 9s).
> This version is a rewrite to use the new sanitizing API since the old
> one was dropped in html5lib 0.99999999 (8 9s).
>* ``bleach.clean`` and friends were rewritten
> ``clean`` was reimplemented as an html5lib filter and happens at a different
> step in the HTML parsing -> traversing -> serializing process. Because of
> that, there are some differences in clean's output as compared with previous
> versions.
> Amongst other things, this version will add end tags even if the tag in
> question is to be escaped.
>* ``bleach.clean`` and friends attribute callables now take three arguments:
> tag, attribute name and attribute value. Previously they only took attribute
> name and attribute value.
> All attribute callables will need to be updated.
>* ``bleach.linkify`` was rewritten
> ``linkify`` was reimplemented as an html5lib Filter. As such, it no longer
> accepts a ``tokenizer`` argument.
> The callback functions for adjusting link attributes now takes a namespaced
> attribute.
> Previously you'd do something like this::
> def check_protocol(attrs, is_new):
> if not attrs.get('href', '').startswith(('http:', 'https:')):
> return None
> return attrs
> Now it's more like this::
> def check_protocol(attrs, is_new):
> if not attrs.get((None, u'href'), u'').startswith(('http:', 'https:')):
> ^^^^^^^^^^^^^^^
> return None
> return attrs
> Further, you need to make sure you're always using unicode values. If you
> don't then html5lib will raise an assertion error that the value is not
> unicode.
> All linkify filters will need to be updated.
>* ``bleach.linkify`` and friends had a ``skip_pre`` argument--that's been
> replaced with a more general ``skip_tags`` argument.
> Before, you might do::
> bleach.linkify(some_text, skip_pre=True)
> The equivalent with Bleach 2.0 is::
> bleach.linkify(some_text, skip_tags=['pre'])
> You can skip other tags, too, like ``style`` or ``script`` or other places
> where you don't want linkification happening.
> All uses of linkify that use ``skip_pre`` will need to be updated.
>**Changes**
>* Supports Python 3.6.
>* Supports html5lib >= 0.99999999 (8 9s).
>* There's a ``bleach.sanitizer.Cleaner`` class that you can instantiate with your
> favorite clean settings for easy reuse.
>* There's a ``bleach.linkifier.Linker`` class that you can instantiate with your
> favorite linkify settings for easy reuse.
>* There's a ``bleach.linkifier.LinkifyFilter`` which is an htm5lib filter that
> you can pass as a filter to ``bleach.sanitizer.Cleaner`` allowing you to clean
> and linkify in one pass.
>* ``bleach.clean`` and friends can now take a callable as an attributes arg value.
>* Tons of bug fixes.
>* Cleaned up tests.
>* Documentation fixes.
*Got merge conflicts? Close this PR and delete the branch. I'll create a new PR for you.*
Happy merging! 🤖
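For orientation, here is a minimal sketch of the two signature changes called out in the changelog above — the three-argument attribute callable and the namespaced linkify callback. It is illustrative only (the helper names are made up), not code from this repository:

```python
import bleach


def allow_https_src(tag, name, value):
    # bleach 2.0 attribute callables receive (tag, name, value);
    # 1.x only passed (name, value).
    return tag == "img" and name == "src" and value.startswith("https:")


cleaned = bleach.clean(
    '<img src="https://example.com/a.png" onclick="evil()">',
    tags=["img"],
    attributes={"img": allow_https_src},
)


def check_protocol(attrs, is_new):
    # linkify callbacks now see (namespace, name) tuples as attribute keys.
    if not attrs.get((None, "href"), "").startswith(("http:", "https:")):
        return None
    return attrs


linked = bleach.linkify("see http://example.com", callbacks=[check_protocol])
```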
| 2017-03-08T21:12:25Z | [] | [] |
|
pypi/warehouse | 1,837 | pypi__warehouse-1837 | [
"775"
] | 57c7ce87263256bfd6cff54f0794d20bf70aed0c | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -142,8 +142,12 @@ def configure(settings=None):
maybe_set(settings, "aws.key_id", "AWS_ACCESS_KEY_ID")
maybe_set(settings, "aws.secret_key", "AWS_SECRET_ACCESS_KEY")
maybe_set(settings, "aws.region", "AWS_REGION")
+ maybe_set(settings, "gcloud.credentials", "GCLOUD_CREDENTIALS")
+ maybe_set(settings, "gcloud.project", "GCLOUD_PROJECT")
+ maybe_set(settings, "warehouse.trending_table", "WAREHOUSE_TRENDING_TABLE")
maybe_set(settings, "celery.broker_url", "AMQP_URL")
maybe_set(settings, "celery.result_url", "REDIS_URL")
+ maybe_set(settings, "celery.scheduler_url", "REDIS_URL")
maybe_set(settings, "database.url", "DATABASE_URL")
maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_URL")
maybe_set(settings, "sentry.dsn", "SENTRY_DSN")
@@ -322,8 +326,9 @@ def configure(settings=None):
config.include(".search")
- # Register the support for AWS
+ # Register the support for AWS and Google Cloud
config.include(".aws")
+ config.include(".gcloud")
# Register the support for Celery Tasks
config.include(".tasks")
diff --git a/warehouse/gcloud.py b/warehouse/gcloud.py
new file mode 100644
--- /dev/null
+++ b/warehouse/gcloud.py
@@ -0,0 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import bigquery
+
+
+def gcloud_bigquery_factory(context, request):
+ credentials = request.registry.settings["gcloud.credentials"]
+ project = request.registry.settings["gcloud.project"]
+
+ return bigquery.Client.from_service_account_json(
+ credentials,
+ project=project,
+ )
+
+
+def includeme(config):
+ config.register_service_factory(
+ gcloud_bigquery_factory,
+ name="gcloud.bigquery",
+ )
diff --git a/warehouse/migrations/versions/5b3f9e687d94_add_a_column_to_project_to_record_the_.py b/warehouse/migrations/versions/5b3f9e687d94_add_a_column_to_project_to_record_the_.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/5b3f9e687d94_add_a_column_to_project_to_record_the_.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Add a column to project to record the zscore
+
+Revision ID: 5b3f9e687d94
+Revises: 7750037b351a
+Create Date: 2017-03-10 02:14:12.402080
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+revision = "5b3f9e687d94"
+down_revision = "7750037b351a"
+
+
+def upgrade():
+ op.add_column("packages", sa.Column("zscore", sa.Float(), nullable=True))
+
+
+def downgrade():
+ op.drop_column("packages", "zscore")
diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py
--- a/warehouse/packaging/__init__.py
+++ b/warehouse/packaging/__init__.py
@@ -10,9 +10,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from celery.schedules import crontab
+
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.services import RedisDownloadStatService
from warehouse.packaging.models import Project, Release
+from warehouse.packaging.tasks import compute_trending
def includeme(config):
@@ -43,3 +46,8 @@ def includeme(config):
cache_keys=["project/{obj.project.normalized_name}"],
purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
)
+
+ # Add a periodic task to compute trending once a day, assuming we have
+ # been configured to be able to access BigQuery.
+ if config.get_settings().get("warehouse.trending_table"):
+ config.add_periodic_task(crontab(minute=0, hour=3), compute_trending)
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -19,7 +19,7 @@
from pyramid.threadlocal import get_current_request
from sqlalchemy import (
CheckConstraint, Column, Enum, ForeignKey, ForeignKeyConstraint, Index,
- Boolean, DateTime, Integer, Table, Text,
+ Boolean, DateTime, Integer, Float, Table, Text,
)
from sqlalchemy import func, orm, sql
from sqlalchemy.orm import validates
@@ -105,6 +105,7 @@ class Project(SitemapMixin, db.ModelBase):
nullable=False,
server_default=sql.false(),
)
+ zscore = Column(Float, nullable=True)
users = orm.relationship(
User,
diff --git a/warehouse/packaging/tasks.py b/warehouse/packaging/tasks.py
new file mode 100644
--- /dev/null
+++ b/warehouse/packaging/tasks.py
@@ -0,0 +1,102 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from warehouse import tasks
+from warehouse.cache.origin import IOriginCache
+from warehouse.packaging.models import Project
+
+
+@tasks.task(ignore_result=True, acks_late=True)
+def compute_trending(request):
+ bq = request.find_service(name="gcloud.bigquery")
+ query = bq.run_sync_query(
+ """ SELECT project,
+ IF(
+ STDDEV(downloads) > 0,
+ (todays_downloads - AVG(downloads))/STDDEV(downloads),
+ NULL
+ ) as zscore
+ FROM (
+ SELECT project,
+ date,
+ downloads,
+ FIRST_VALUE(downloads) OVER (
+ PARTITION BY project
+ ORDER BY DATE DESC
+ ROWS BETWEEN UNBOUNDED PRECEDING
+ AND UNBOUNDED FOLLOWING
+ ) as todays_downloads
+ FROM (
+ SELECT file.project as project,
+ DATE(timestamp) AS date,
+ COUNT(*) as downloads
+ FROM `{table}`
+ WHERE _TABLE_SUFFIX BETWEEN
+ FORMAT_DATE(
+ "%Y%m%d",
+ DATE_ADD(CURRENT_DATE(), INTERVAL -31 day))
+ AND
+ FORMAT_DATE(
+ "%Y%m%d",
+ DATE_ADD(CURRENT_DATE(), INTERVAL -1 day))
+ GROUP BY file.project, date
+ )
+ )
+ GROUP BY project, todays_downloads
+ HAVING SUM(downloads) >= 5000
+ ORDER BY zscore DESC
+ """.format(table=request.registry.settings["warehouse.trending_table"])
+ )
+ query.use_legacy_sql = False
+ query.run()
+
+ zscores = {}
+ page_token = None
+ while True:
+ rows, total_rows, page_token = query.fetch_data(
+ max_results=1000,
+ page_token=page_token,
+ )
+
+ zscores.update(dict(rows))
+
+ if not page_token:
+ break
+
+ # We're going to "reset" all of our zscores to a steady state where they
+ # are all equal to ``None``. The next query will then set any that have a
+ # value back to the expected value.
+ (request.db.query(Project)
+ .filter(Project.zscore != None) # noqa
+ .update({Project.zscore: None}))
+
+ # We need to convert the normalized name that we get out of BigQuery and
+ # turn it into the primary key of the Project object and construct a list
+ # of primary key: new zscore, including a default of None if the item isn't
+ # in the result set.
+ query = request.db.query(Project.name, Project.normalized_name).all()
+ to_update = [
+ {"name": name, "zscore": zscores[normalized_name]}
+ for name, normalized_name in query
+ if normalized_name in zscores
+ ]
+
+ # Reflect out updated ZScores into the database.
+ request.db.bulk_update_mappings(Project, to_update)
+
+ # Trigger a purge of the trending surrogate key.
+ try:
+ cacher = request.find_service(IOriginCache)
+ except ValueError:
+ pass
+ else:
+ cacher.purge(["trending"])
diff --git a/warehouse/tasks.py b/warehouse/tasks.py
--- a/warehouse/tasks.py
+++ b/warehouse/tasks.py
@@ -51,13 +51,14 @@ def __new__(cls, *args, **kwargs):
def run(*args, **kwargs):
original_run = obj._wh_original_run
request = obj.get_request()
- try:
- with request.tm:
+
+ with request.tm:
+ try:
return original_run(*args, **kwargs)
- except BaseException as exc:
- if request.tm._retryable(exc.__class__, exc):
- raise obj.retry(exc=exc)
- raise
+ except BaseException as exc:
+ if request.tm._retryable(exc.__class__, exc):
+ raise obj.retry(exc=exc)
+ raise
obj._wh_original_run, obj.run = obj.run, run
@@ -143,6 +144,20 @@ def _get_celery_app(config):
return config.registry["celery.app"]
+def _add_periodic_task(config, schedule, func, args=(), kwargs=(), name=None,
+ **opts):
+ def add_task():
+ config.registry["celery.app"].add_periodic_task(
+ schedule,
+ config.task(func).s(),
+ args=args,
+ kwargs=kwargs,
+ name=name,
+ **opts
+ )
+ config.action(None, add_task, order=100)
+
+
def includeme(config):
s = config.registry.settings
@@ -161,6 +176,7 @@ def includeme(config):
task_queue_ha_policy="all",
task_serializer="json",
worker_disable_rate_limits=True,
+ REDBEAT_REDIS_URL=s["celery.scheduler_url"],
)
config.registry["celery.app"].Task = WarehouseTask
config.registry["celery.app"].pyramid_config = config
@@ -169,6 +185,12 @@ def includeme(config):
("celery", "finalize"),
config.registry["celery.app"].finalize,
)
+
+ config.add_directive(
+ "add_periodic_task",
+ _add_periodic_task,
+ action_wrap=False,
+ )
config.add_directive("make_celery_app", _get_celery_app, action_wrap=False)
config.add_directive("task", _get_task_from_config, action_wrap=False)
config.add_request_method(_get_task_from_request, name="task", reify=True)
diff --git a/warehouse/views.py b/warehouse/views.py
--- a/warehouse/views.py
+++ b/warehouse/views.py
@@ -125,16 +125,16 @@ def opensearchxml(request):
1 * 60 * 60, # 1 hour
stale_while_revalidate=10 * 60, # 10 minutes
stale_if_error=1 * 24 * 60 * 60, # 1 day
- keys=["all-projects"],
+ keys=["all-projects", "trending"],
),
]
)
def index(request):
project_names = [
r[0] for r in (
- request.db.query(File.name)
- .group_by(File.name)
- .order_by(func.sum(File.downloads).desc())
+ request.db.query(Project.name)
+ .order_by(Project.zscore.desc().nullslast(),
+ func.random())
.limit(5)
.all())
]
@@ -143,10 +143,12 @@ def index(request):
request.db.query(Release)
.distinct(Release.name)
.filter(Release.name.in_(project_names))
- .order_by(Release.name, Release._pypi_ordering.desc())
+ .order_by(Release.name,
+ Release.is_prerelease.nullslast(),
+ Release._pypi_ordering.desc())
.subquery(),
)
- top_projects = (
+ trending_projects = (
request.db.query(release_a)
.options(joinedload(release_a.project))
.order_by(func.array_idx(project_names, release_a.name))
@@ -175,7 +177,7 @@ def index(request):
return {
"latest_releases": latest_releases,
- "top_projects": top_projects,
+ "trending_projects": trending_projects,
"num_projects": counts.get(Project.__tablename__, 0),
"num_releases": counts.get(Release.__tablename__, 0),
"num_files": counts.get(File.__tablename__, 0),
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -133,6 +133,7 @@ def app_config(database):
"camo.key": "insecure key",
"celery.broker_url": "amqp://",
"celery.result_url": "redis://localhost:0/",
+ "celery.scheduler_url": "redis://localhost:0/",
"database.url": database,
"docs.url": "http://docs.example.com/",
"download_stats.url": "redis://localhost:0/",
diff --git a/tests/unit/packaging/test_init.py b/tests/unit/packaging/test_init.py
--- a/tests/unit/packaging/test_init.py
+++ b/tests/unit/packaging/test_init.py
@@ -11,13 +11,18 @@
# limitations under the License.
import pretend
+import pytest
+
+from celery.schedules import crontab
from warehouse import packaging
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.models import Project, Release
+from warehouse.packaging.tasks import compute_trending
-def test_includme(monkeypatch):
+@pytest.mark.parametrize("with_trending", [True, False])
+def test_includme(monkeypatch, with_trending):
storage_class = pretend.stub(create_service=pretend.stub())
download_stat_service_obj = pretend.stub()
@@ -43,6 +48,9 @@ def test_includme(monkeypatch):
},
),
register_origin_cache_keys=pretend.call_recorder(lambda c, **kw: None),
+ get_settings=lambda: (
+ {"warehouse.trending_table": "foobar"} if with_trending else {}),
+ add_periodic_task=pretend.call_recorder(lambda *a, **kw: None),
)
packaging.includeme(config)
@@ -75,3 +83,8 @@ def test_includme(monkeypatch):
],
),
]
+
+ if with_trending:
+ assert config.add_periodic_task.calls == [
+ pretend.call(crontab(minute=0, hour=3), compute_trending),
+ ]
diff --git a/tests/unit/packaging/test_tasks.py b/tests/unit/packaging/test_tasks.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/packaging/test_tasks.py
@@ -0,0 +1,114 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+import pytest
+
+from warehouse.cache.origin import IOriginCache
+from warehouse.packaging.models import Project
+from warehouse.packaging.tasks import compute_trending
+
+from ...common.db.packaging import ProjectFactory
+
+
+class TestComputeTrending:
+
+ @pytest.mark.parametrize("with_purges", [True, False])
+ def test_computes_trending(self, db_request, with_purges):
+ projects = [
+ ProjectFactory.create(zscore=1 if not i else None)
+ for i in range(3)
+ ]
+
+ results = iter([
+ ([(projects[1].normalized_name, 2)], 2, "blah"),
+ ([(projects[2].normalized_name, -1)], 2, None),
+ ])
+ query = pretend.stub(
+ use_legacy_sql=True,
+ run=pretend.call_recorder(lambda: None),
+ fetch_data=pretend.call_recorder(
+ lambda max_results, page_token: next(results),
+ )
+ )
+ bigquery = pretend.stub(
+ run_sync_query=pretend.call_recorder(lambda q: query),
+ )
+
+ cacher = pretend.stub(purge=pretend.call_recorder(lambda keys: None))
+
+ def find_service(iface=None, name=None):
+ if iface is None and name == "gcloud.bigquery":
+ return bigquery
+
+ if with_purges and issubclass(iface, IOriginCache):
+ return cacher
+
+ raise ValueError
+
+ db_request.find_service = find_service
+ db_request.registry.settings = {
+ "warehouse.trending_table": "example.pypi.downloads*",
+ }
+
+ compute_trending(db_request)
+
+ assert bigquery.run_sync_query.calls == [
+ pretend.call(""" SELECT project,
+ IF(
+ STDDEV(downloads) > 0,
+ (todays_downloads - AVG(downloads))/STDDEV(downloads),
+ NULL
+ ) as zscore
+ FROM (
+ SELECT project,
+ date,
+ downloads,
+ FIRST_VALUE(downloads) OVER (
+ PARTITION BY project
+ ORDER BY DATE DESC
+ ROWS BETWEEN UNBOUNDED PRECEDING
+ AND UNBOUNDED FOLLOWING
+ ) as todays_downloads
+ FROM (
+ SELECT file.project as project,
+ DATE(timestamp) AS date,
+ COUNT(*) as downloads
+ FROM `example.pypi.downloads*`
+ WHERE _TABLE_SUFFIX BETWEEN
+ FORMAT_DATE(
+ "%Y%m%d",
+ DATE_ADD(CURRENT_DATE(), INTERVAL -31 day))
+ AND
+ FORMAT_DATE(
+ "%Y%m%d",
+ DATE_ADD(CURRENT_DATE(), INTERVAL -1 day))
+ GROUP BY file.project, date
+ )
+ )
+ GROUP BY project, todays_downloads
+ HAVING SUM(downloads) >= 5000
+ ORDER BY zscore DESC
+ """),
+ ]
+ assert not query.use_legacy_sql
+ assert query.run.calls == [pretend.call()]
+ assert (cacher.purge.calls ==
+ ([pretend.call(["trending"])] if with_purges else []))
+
+ results = dict(db_request.db.query(Project.name, Project.zscore).all())
+
+ assert results == {
+ projects[0].name: None,
+ projects[1].name: 2,
+ projects[2].name: -1,
+ }
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -331,6 +331,7 @@ def __init__(self):
pretend.call(".policy"),
pretend.call(".search"),
pretend.call(".aws"),
+ pretend.call(".gcloud"),
pretend.call(".tasks"),
pretend.call(".sessions"),
pretend.call(".cache.http"),
diff --git a/tests/unit/test_gcloud.py b/tests/unit/test_gcloud.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/test_gcloud.py
@@ -0,0 +1,56 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+
+from warehouse import gcloud
+
+
+def test_aws_session_factory(monkeypatch):
+ client = pretend.stub()
+
+ bigquery = pretend.stub(
+ Client=pretend.stub(
+ from_service_account_json=pretend.call_recorder(
+ lambda path, project: client,
+ ),
+ ),
+ )
+ monkeypatch.setattr(gcloud, "bigquery", bigquery)
+
+ request = pretend.stub(
+ registry=pretend.stub(
+ settings={
+ "gcloud.credentials": "/the/path/to/gcloud.json",
+ "gcloud.project": "my-cool-project",
+ },
+ ),
+ )
+
+ assert gcloud.gcloud_bigquery_factory(None, request) is client
+ assert bigquery.Client.from_service_account_json.calls == [
+ pretend.call("/the/path/to/gcloud.json", project="my-cool-project"),
+ ]
+
+
+def test_includeme():
+ config = pretend.stub(
+ register_service_factory=pretend.call_recorder(
+ lambda factory, name: None
+ )
+ )
+
+ gcloud.includeme(config)
+
+ assert config.register_service_factory.calls == [
+ pretend.call(gcloud.gcloud_bigquery_factory, name="gcloud.bigquery"),
+ ]
diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py
--- a/tests/unit/test_tasks.py
+++ b/tests/unit/test_tasks.py
@@ -271,7 +271,7 @@ def run():
assert request.tm.__enter__.calls == [pretend.call()]
assert request.tm.__exit__.calls == [
- pretend.call(RetryThisException, mock.ANY, mock.ANY),
+ pretend.call(Retry, mock.ANY, mock.ANY),
]
assert request.tm._retryable.calls == [
pretend.call(RetryThisException, mock.ANY),
@@ -386,6 +386,34 @@ def test_get_task_via_config(self):
assert tasks._get_task_from_config(config, task_func)
+def test_add_periodic_task():
+ signature = pretend.stub()
+ task_obj = pretend.stub(s=lambda: signature)
+ celery_app = pretend.stub(
+ add_periodic_task=pretend.call_recorder(lambda *a, **k: None),
+ )
+ actions = []
+ config = pretend.stub(
+ action=pretend.call_recorder(lambda d, f, order: actions.append(f)),
+ registry={"celery.app": celery_app},
+ task=pretend.call_recorder(lambda t: task_obj),
+ )
+
+ schedule = pretend.stub()
+ func = pretend.stub()
+
+ tasks._add_periodic_task(config, schedule, func)
+
+ for action in actions:
+ action()
+
+ assert config.action.calls == [pretend.call(None, mock.ANY, order=100)]
+ assert config.task.calls == [pretend.call(func)]
+ assert celery_app.add_periodic_task.calls == [
+ pretend.call(schedule, signature, args=(), kwargs=(), name=None),
+ ]
+
+
def test_make_celery_app():
celery_app = pretend.stub()
config = pretend.stub(registry={"celery.app": celery_app})
@@ -413,6 +441,7 @@ def test_includeme(env, ssl):
"warehouse.env": env,
"celery.broker_url": pretend.stub(),
"celery.result_url": pretend.stub(),
+ "celery.scheduler_url": pretend.stub(),
},
),
)
@@ -431,12 +460,19 @@ def test_includeme(env, ssl):
"task_serializer": "json",
"accept_content": ["json", "msgpack"],
"result_compression": "gzip",
- "task_queue_ha_policy": "all"}.items():
+ "task_queue_ha_policy": "all",
+ "REDBEAT_REDIS_URL": (
+ config.registry.settings["celery.scheduler_url"])}.items():
assert app.conf[key] == value
assert config.action.calls == [
pretend.call(("celery", "finalize"), app.finalize),
]
assert config.add_directive.calls == [
+ pretend.call(
+ "add_periodic_task",
+ tasks._add_periodic_task,
+ action_wrap=False,
+ ),
pretend.call(
"make_celery_app",
tasks._get_celery_app,
diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py
--- a/tests/unit/test_views.py
+++ b/tests/unit/test_views.py
@@ -93,7 +93,7 @@ def test_index(self, db_request):
assert index(db_request) == {
# assert that ordering is correct
'latest_releases': [release2, release1],
- 'top_projects': [release2],
+ 'trending_projects': [release2],
'num_projects': 1,
'num_users': 3,
'num_releases': 2,
| Have top projects use the last 30 days or so instead of all time
Right now the "top projects" thing on the index page uses the all time download counts. This is OK but it means that they are unlikely to _ever_ really change. Instead it'd be nice if we used the last N days or something like that.
| Even in that case, setuptools/simplejson/pip will basically always dominate. Not to say those aren't decent projects, but if they are the most downloaded, wouldn't one think enough people already know about them?
I'm not sure what your stomach for state is, but "Trending" is probably better. The least complex way I can think of to implement this would be to reuse the daily/weekly/monthly counts and look at packages that have, e.g., 500 downloads today and only 3000 downloads for the month, or 3000 this week and only 4000 for the month. Plus, filter out newly uploaded packages that have a low total download count (or establish a baseline download count model for new packages).
I agree this should probably use trending instead of the last 30 days. That makes a lot more sense. However, that probably means this is more work than we should put into it prior to the initial launch, so I'm going to bump this out of Become PyPI.
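The patch above ends up pushing this calculation into BigQuery, but the "trending" statistic itself is just a z-score of today's downloads against the recent daily history. A rough Python equivalent, with made-up numbers, purely to illustrate the idea:

```python
import statistics

# Hypothetical daily download counts for one project, most recent day last.
daily_downloads = [90, 110, 95, 105, 100, 98, 102, 400]

today = daily_downloads[-1]
mean = statistics.mean(daily_downloads)
stdev = statistics.stdev(daily_downloads)

# Mirrors the SQL's guard against a zero standard deviation.
zscore = (today - mean) / stdev if stdev > 0 else None
print(zscore)  # a large positive value marks the project as "trending"
```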
| 2017-03-12T17:55:11Z | [] | [] |
pypi/warehouse | 1,849 | pypi__warehouse-1849 | [
"1529"
] | 170efae8818b03e1327946a7fe860d71e930e068 | diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -262,6 +262,7 @@ def configure(settings=None):
"format_package_type",
"warehouse.filters:format_package_type"
)
+ filters.setdefault("parse_version", "warehouse.filters:parse_version")
# We also want to register some global functions for Jinja
jglobals = config.get_settings().setdefault("jinja2.globals", {})
diff --git a/warehouse/filters.py b/warehouse/filters.py
--- a/warehouse/filters.py
+++ b/warehouse/filters.py
@@ -23,6 +23,7 @@
import html5lib.treewalkers
import jinja2
+import packaging.version
import readme_renderer.rst
import readme_renderer.txt
@@ -164,3 +165,7 @@ def contains_valid_uris(items):
URIs
"""
return any(is_valid_uri(i) for i in items)
+
+
+def parse_version(version_str):
+ return packaging.version.parse(version_str)
| diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py
--- a/tests/functional/test_templates.py
+++ b/tests/functional/test_templates.py
@@ -42,6 +42,7 @@ def test_templates_for_empty_titles():
"urlparse": "warehouse.filters:urlparse",
"contains_valid_uris": "warehouse.filters:contains_valid_uris",
"format_package_type": "warehouse.filters:format_package_type",
+ "parse_version": "warehouse.filters:parse_version",
})
for dir_, _, files in os.walk(dir_name):
diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py
--- a/tests/unit/test_filters.py
+++ b/tests/unit/test_filters.py
@@ -13,6 +13,7 @@
import urllib.parse
import jinja2
+import packaging.version
import pretend
import pytest
import readme_renderer.rst
@@ -246,3 +247,13 @@ def test_contains_valid_uris(inp, expected):
)
def test_format_package_type(inp, expected):
assert filters.format_package_type(inp) == expected
+
+
+@pytest.mark.parametrize(
+ ("inp", "expected"),
+ [
+ ("1.0", packaging.version.Version("1.0")),
+ ]
+)
+def test_parse_version(inp, expected):
+ assert filters.parse_version(inp) == expected
| "Newer version available" not correct if pre-release page
No big deal, but after the release of #1519, when you are on a *recent* preview package, the version badge shows "Newer version available" for the latest stable:
![image](https://cloud.githubusercontent.com/assets/1050156/20896960/efa29c9a-bad4-11e6-8a34-40426930a5cd.png)
This is technically false: this is not a newer version, but the latest stable release. I humbly think that in this case this might confuse people.
This might be true for a 1.0.0rc1 compared to a 1.0.0, but not for a 2.0.0rc1 compared to a 1.0.0.
FYI @brettcannon
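The eventual fix (the ``parse_version`` filter added in the patch above) leans on ``packaging.version`` for the comparison, whose ordering matches the distinction described here. A small illustration, not taken from the templates:

```python
from packaging.version import parse

# A release candidate sorts *before* its own final release...
assert parse("1.0.0rc1") < parse("1.0.0")

# ...but a pre-release of a newer series still sorts *after* an older final,
# so a badge on 2.0.0rc1 should not point at 1.0.0 as "newer".
assert parse("2.0.0rc1") > parse("1.0.0")
assert parse("2.0.0rc1").is_prerelease
```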
| Ah yes. It should maybe be orange or something that just says that this is a pre-release version and the latest stable is X. | 2017-03-15T16:31:09Z | [] | [] |
pypi/warehouse | 1,931 | pypi__warehouse-1931 | [
"661"
] | c1c219d731e19b226395287623856d107456f565 | diff --git a/warehouse/cli/db/__init__.py b/warehouse/cli/db/__init__.py
--- a/warehouse/cli/db/__init__.py
+++ b/warehouse/cli/db/__init__.py
@@ -10,9 +10,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import contextlib
+
from warehouse.cli import warehouse
+@contextlib.contextmanager
+def alembic_lock(engine, alembic_config):
+ with engine.begin() as connection:
+ # Attempt to acquire the alembic lock, this will wait until the lock
+ # has been acquired allowing multiple commands to wait for each other.
+ connection.execute("SELECT pg_advisory_lock(hashtext('alembic'))")
+
+ try:
+ # Tell Alembic use our current connection instead of creating it's
+ # own.
+ alembic_config.attributes["connection"] = connection
+
+ # Yield control back up to let the command itself run.
+ yield alembic_config
+ finally:
+ # Finally we need to release the lock we've acquired.
+ connection.execute(
+ "SELECT pg_advisory_unlock(hashtext('alembic'))")
+
+
@warehouse.group() # pragma: no branch
def db():
"""
diff --git a/warehouse/cli/db/branches.py b/warehouse/cli/db/branches.py
--- a/warehouse/cli/db/branches.py
+++ b/warehouse/cli/db/branches.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -22,4 +22,6 @@ def branches(config, **kwargs):
"""
Show current branch points.
"""
- alembic.command.branches(config.alembic_config(), **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.branches(alembic_config, **kwargs)
diff --git a/warehouse/cli/db/current.py b/warehouse/cli/db/current.py
--- a/warehouse/cli/db/current.py
+++ b/warehouse/cli/db/current.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -22,4 +22,6 @@ def current(config, **kwargs):
"""
Display the current revision for a database.
"""
- alembic.command.current(config.alembic_config(), **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.current(alembic_config, **kwargs)
diff --git a/warehouse/cli/db/downgrade.py b/warehouse/cli/db/downgrade.py
--- a/warehouse/cli/db/downgrade.py
+++ b/warehouse/cli/db/downgrade.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -23,4 +23,6 @@ def downgrade(config, revision, **kwargs):
"""
Revert to a previous version.
"""
- alembic.command.downgrade(config.alembic_config(), revision, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.downgrade(alembic_config, revision, **kwargs)
diff --git a/warehouse/cli/db/heads.py b/warehouse/cli/db/heads.py
--- a/warehouse/cli/db/heads.py
+++ b/warehouse/cli/db/heads.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -27,4 +27,6 @@ def heads(config, **kwargs):
"""
Show current available heads.
"""
- alembic.command.heads(config.alembic_config(), **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.heads(alembic_config, **kwargs)
diff --git a/warehouse/cli/db/history.py b/warehouse/cli/db/history.py
--- a/warehouse/cli/db/history.py
+++ b/warehouse/cli/db/history.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -23,4 +23,6 @@ def history(config, revision_range, **kwargs):
"""
List changeset scripts in chronological order.
"""
- alembic.command.history(config.alembic_config(), revision_range, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.history(alembic_config, revision_range, **kwargs)
diff --git a/warehouse/cli/db/merge.py b/warehouse/cli/db/merge.py
--- a/warehouse/cli/db/merge.py
+++ b/warehouse/cli/db/merge.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -36,4 +36,6 @@ def merge(config, revisions, **kwargs):
Takes one or more revisions or "heads" for all heads and merges them into
a single revision.
"""
- alembic.command.merge(config.alembic_config(), revisions, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.merge(alembic_config, revisions, **kwargs)
diff --git a/warehouse/cli/db/revision.py b/warehouse/cli/db/revision.py
--- a/warehouse/cli/db/revision.py
+++ b/warehouse/cli/db/revision.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -52,4 +52,6 @@ def revision(config, **kwargs):
"""
Create a new revision file.
"""
- alembic.command.revision(config.alembic_config(), **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.revision(alembic_config, **kwargs)
diff --git a/warehouse/cli/db/show.py b/warehouse/cli/db/show.py
--- a/warehouse/cli/db/show.py
+++ b/warehouse/cli/db/show.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -23,4 +23,6 @@ def show(config, revision, **kwargs):
"""
Show the revision(s) denoted by the given symbol.
"""
- alembic.command.show(config.alembic_config(), revision, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.show(alembic_config, revision, **kwargs)
diff --git a/warehouse/cli/db/stamp.py b/warehouse/cli/db/stamp.py
--- a/warehouse/cli/db/stamp.py
+++ b/warehouse/cli/db/stamp.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -23,4 +23,6 @@ def stamp(config, revision, **kwargs):
"""
Stamp the revision table with the given revision.
"""
- alembic.command.stamp(config.alembic_config(), revision, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.stamp(alembic_config, revision, **kwargs)
diff --git a/warehouse/cli/db/upgrade.py b/warehouse/cli/db/upgrade.py
--- a/warehouse/cli/db/upgrade.py
+++ b/warehouse/cli/db/upgrade.py
@@ -13,7 +13,7 @@
import alembic.command
import click
-from warehouse.cli.db import db
+from warehouse.cli.db import db, alembic_lock
@db.command()
@@ -23,4 +23,6 @@ def upgrade(config, revision, **kwargs):
"""
Upgrade database.
"""
- alembic.command.upgrade(config.alembic_config(), revision, **kwargs)
+ with alembic_lock(config.registry["sqlalchemy.engine"],
+ config.alembic_config()) as alembic_config:
+ alembic.command.upgrade(alembic_config, revision, **kwargs)
diff --git a/warehouse/migrations/env.py b/warehouse/migrations/env.py
--- a/warehouse/migrations/env.py
+++ b/warehouse/migrations/env.py
@@ -42,18 +42,17 @@ def run_migrations_online():
In this scenario we need to create an Engine
and associate a connection with the context.
"""
- options = context.config.get_section(context.config.config_ini_section)
- url = options.pop("url")
- engine = create_engine(url, poolclass=pool.NullPool)
+ connectable = context.config.attributes.get("connection", None)
- connection = engine.connect()
- context.configure(connection=connection, target_metadata=db.metadata)
+ if connectable is None:
+ options = context.config.get_section(context.config.config_ini_section)
+ url = options.pop("url")
+ connectable = create_engine(url, poolclass=pool.NullPool)
- try:
+ with connectable.connect() as connection:
+ context.configure(connection=connection, target_metadata=db.metadata)
with context.begin_transaction():
context.run_migrations()
- finally:
- connection.close()
if context.is_offline_mode():
| diff --git a/tests/unit/cli/test_db.py b/tests/unit/cli/test_db.py
--- a/tests/unit/cli/test_db.py
+++ b/tests/unit/cli/test_db.py
@@ -30,11 +30,24 @@ def test_branches_command(monkeypatch, cli, pyramid_config):
alembic_branches = pretend.call_recorder(lambda config: None)
monkeypatch.setattr(alembic.command, "branches", alembic_branches)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(branches, obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_branches.calls == [pretend.call(alembic_config)]
@@ -42,11 +55,24 @@ def test_current_command(monkeypatch, cli, pyramid_config):
alembic_current = pretend.call_recorder(lambda config: None)
monkeypatch.setattr(alembic.command, "current", alembic_current)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(current, obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_current.calls == [pretend.call(alembic_config)]
@@ -54,11 +80,24 @@ def test_downgrade_command(monkeypatch, cli, pyramid_config):
alembic_downgrade = pretend.call_recorder(lambda config, revision: None)
monkeypatch.setattr(alembic.command, "downgrade", alembic_downgrade)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(downgrade, ["--", "-1"], obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_downgrade.calls == [pretend.call(alembic_config, "-1")]
@@ -76,11 +115,24 @@ def test_heads_command(monkeypatch, cli, pyramid_config, args, ekwargs):
)
monkeypatch.setattr(alembic.command, "heads", alembic_heads)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(heads, args, obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_heads.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -88,11 +140,24 @@ def test_history_command(monkeypatch, cli, pyramid_config):
alembic_history = pretend.call_recorder(lambda config, range: None)
monkeypatch.setattr(alembic.command, "history", alembic_history)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(history, ["foo:bar"], obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_history.calls == [pretend.call(alembic_config, "foo:bar")]
@@ -123,11 +188,24 @@ def test_merge_command(monkeypatch, cli, pyramid_config, args, eargs, ekwargs):
)
monkeypatch.setattr(alembic.command, "merge", alembic_merge)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(merge, args, obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_merge.calls == [
pretend.call(alembic_config, *eargs, **ekwargs),
]
@@ -167,11 +245,24 @@ def test_revision_command(monkeypatch, cli, pyramid_config, args, ekwargs):
)
monkeypatch.setattr(alembic.command, "revision", alembic_revision)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(revision, args, obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_revision.calls == [pretend.call(alembic_config, **ekwargs)]
@@ -179,11 +270,24 @@ def test_show_command(monkeypatch, cli, pyramid_config):
alembic_show = pretend.call_recorder(lambda config, revision: None)
monkeypatch.setattr(alembic.command, "show", alembic_show)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(show, ["foo"], obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_show.calls == [pretend.call(alembic_config, "foo")]
@@ -191,11 +295,24 @@ def test_stamp_command(monkeypatch, cli, pyramid_config):
alembic_stamp = pretend.call_recorder(lambda config, revision: None)
monkeypatch.setattr(alembic.command, "stamp", alembic_stamp)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(stamp, ["foo"], obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_stamp.calls == [pretend.call(alembic_config, "foo")]
@@ -203,9 +320,22 @@ def test_upgrade_command(monkeypatch, cli, pyramid_config):
alembic_upgrade = pretend.call_recorder(lambda config, revision: None)
monkeypatch.setattr(alembic.command, "upgrade", alembic_upgrade)
- alembic_config = pretend.stub()
+ alembic_config = pretend.stub(attributes={})
pyramid_config.alembic_config = lambda: alembic_config
+ connection = pretend.stub(
+ __enter__=lambda: connection,
+ __exit__=lambda *a, **k: None,
+ execute=pretend.call_recorder(lambda sql: None),
+ )
+ engine = pretend.stub(begin=lambda: connection)
+ pyramid_config.registry["sqlalchemy.engine"] = engine
+
result = cli.invoke(upgrade, ["foo"], obj=pyramid_config)
assert result.exit_code == 0
+ assert alembic_config.attributes == {"connection": connection}
+ assert connection.execute.calls == [
+ pretend.call("SELECT pg_advisory_lock(hashtext('alembic'))"),
+ pretend.call("SELECT pg_advisory_unlock(hashtext('alembic'))"),
+ ]
assert alembic_upgrade.calls == [pretend.call(alembic_config, "foo")]
| Automatically Run Database Migrations
Right now there's no way to automatically run our database migrations; however, it would be great if there were (I think?). At the moment, whenever there is a migration to apply, I have to run it manually. Given that we're on Heroku and we're using pipelines, I'm not sure we have very many options for running our migrations. The only idea I can think of is to run them as part of application startup, which is problematic given that we have multiple Dynos running in production.
So I guess the question is: should we run them automatically, and if we should, is there anywhere better to run them than on application startup?
| After talking to @ewdurbin, I think the way we're going to go with this is that we'll have auto migrations _only_ when the staging site deploys, because that deploys from GitHub automatically. On top of that, we'll add a wrapper around the `heroku pipeline:promote` command that won't allow you to trigger a promotion if there is a pending database migration to run.
This will require us to run our migrations remotely, which means we'll need to be careful with how our migrations are handled. We don't want to make any migrations that will remove or alter something that the currently running copy of the code still requires, nor add anything that the code won't be set up to handle.
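The merged change wraps every Alembic command in a Postgres advisory lock so that two deploys started at the same time cannot run migrations concurrently. The core pattern, sketched standalone (the connection URL is a placeholder):

```python
from contextlib import contextmanager

import sqlalchemy

engine = sqlalchemy.create_engine("postgresql://localhost/warehouse")  # placeholder URL


@contextmanager
def migration_lock(engine):
    with engine.begin() as connection:
        # Blocks until any other session holding the same lock releases it.
        connection.execute("SELECT pg_advisory_lock(hashtext('alembic'))")
        try:
            yield connection
        finally:
            connection.execute(
                "SELECT pg_advisory_unlock(hashtext('alembic'))")


with migration_lock(engine) as connection:
    # Hand `connection` to Alembic (config.attributes["connection"]) and run
    # the migration while the lock is held.
    pass
```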
| 2017-04-19T17:31:34Z | [] | [] |
pypi/warehouse | 2,023 | pypi__warehouse-2023 | [
"1712"
] | 2c1f3507e1323f330f95a083d5ea9a42945967b2 | diff --git a/warehouse/cli/search/reindex.py b/warehouse/cli/search/reindex.py
--- a/warehouse/cli/search/reindex.py
+++ b/warehouse/cli/search/reindex.py
@@ -70,6 +70,7 @@ def reindex(config, **kwargs):
random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
new_index_name = "{}-{}".format(index_base, random_token)
doc_types = config.registry.get("search.doc_types", set())
+ shards = config.registry.get("elasticsearch.shards", 1)
# Create the new index with zero replicas and index refreshes disabled
# while we are bulk indexing.
@@ -77,10 +78,11 @@ def reindex(config, **kwargs):
new_index_name,
doc_types,
using=client,
- shards=config.registry.get("elasticsearch.shards", 1),
+ shards=shards,
replicas=0,
interval="-1",
)
+ new_index.create(wait_for_active_shards=shards)
# From this point on, if any error occurs, we want to be able to delete our
# in progress index.
| diff --git a/tests/unit/cli/search/test_reindex.py b/tests/unit/cli/search/test_reindex.py
--- a/tests/unit/cli/search/test_reindex.py
+++ b/tests/unit/cli/search/test_reindex.py
@@ -63,6 +63,7 @@ def __init__(self):
self.put_settings = pretend.call_recorder(lambda *a, **kw: None)
self.forcemerge = pretend.call_recorder(lambda *a, **kw: None)
self.delete = pretend.call_recorder(lambda *a, **kw: None)
+ self.create = pretend.call_recorder(lambda *a, **kw: None)
def exists_alias(self, name):
return name in self.aliases
@@ -184,6 +185,7 @@ def project_docs(db):
registry={
"elasticsearch.client": es_client,
"elasticsearch.index": "warehouse",
+ "elasticsearch.shards": 42,
"sqlalchemy.engine": db_engine,
},
)
@@ -201,9 +203,22 @@ def project_docs(db):
assert sess_obj.execute.calls == [
pretend.call("SET statement_timeout = '600s'"),
]
- assert parallel_bulk .calls == [pretend.call(es_client, docs)]
+ assert parallel_bulk.calls == [pretend.call(es_client, docs)]
assert sess_obj.rollback.calls == [pretend.call()]
assert sess_obj.close.calls == [pretend.call()]
+ assert es_client.indices.create.calls == [
+ pretend.call(
+ body={
+ 'settings': {
+ 'number_of_shards': 42,
+ 'number_of_replicas': 0,
+ 'refresh_interval': '-1',
+ }
+ },
+ wait_for_active_shards=42,
+ index='warehouse-cbcbcbcbcb',
+ )
+ ]
assert es_client.indices.delete.calls == []
assert es_client.indices.aliases == {
"warehouse": ["warehouse-cbcbcbcbcb"],
@@ -252,6 +267,7 @@ def project_docs(db):
registry={
"elasticsearch.client": es_client,
"elasticsearch.index": "warehouse",
+ "elasticsearch.shards": 42,
"sqlalchemy.engine": db_engine,
},
)
@@ -272,6 +288,19 @@ def project_docs(db):
assert parallel_bulk.calls == [pretend.call(es_client, docs)]
assert sess_obj.rollback.calls == [pretend.call()]
assert sess_obj.close.calls == [pretend.call()]
+ assert es_client.indices.create.calls == [
+ pretend.call(
+ body={
+ 'settings': {
+ 'number_of_shards': 42,
+ 'number_of_replicas': 0,
+ 'refresh_interval': '-1',
+ }
+ },
+ wait_for_active_shards=42,
+ index='warehouse-cbcbcbcbcb',
+ )
+ ]
assert es_client.indices.delete.calls == [
pretend.call('warehouse-aaaaaaaaaa'),
]
| Search by topic broken ?
Browsing packages, then choosing the Internet / WWW/HTTP / Browsers topic gives no result:
https://pypi.org/search/?q=&o=&c=Topic+%3A%3A+Internet+%3A%3A+WWW%2FHTTP+%3A%3A+Browsers
There should be at least the [mechanoid package](https://pypi.org/project/mechanoid/)
Using Firefox 50.1.0 on Ubuntu 16.04
| This does indeed seem to be a bug with filtering by any classifier, thanks for reporting it. | 2017-05-22T05:36:30Z | [] | [] |