Dataset columns: text (string, lengths 4 to 288k), id (string, lengths 17 to 110), metadata (dict), __index_level_0__ (int64, values 0 to 47).
{% extends "base_template.html" %} {% block content %} <h2>Author: {{ author.first_name }} {{ author.last_name }}</h2> <div class="author_dates"> <p>Born in {{author.date_of_birth}}</p> {% if author.date_of_death %} <p>Died in {{ author.date_of_death }}</p> {% endif %} </div> <div class="books" style="margin-right: 50px;"> <h2>Books</h2> {% for copy in author.book_set.all %} <a href="{{copy.get_absolute_url}}">{{copy.title}}</a> <div class="book_details"> <p>{{ copy.summary }}</p> </div> {% endfor %} </div> {% endblock %}
Django-locallibrary/LocalLibrary/catalog/Templates/author_info.html/0
{ "file_path": "Django-locallibrary/LocalLibrary/catalog/Templates/author_info.html", "repo_id": "Django-locallibrary", "token_count": 482 }
0
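The template above expects an `author` object in its context and links each related book through `get_absolute_url`. A minimal sketch of a view and URL pattern that could supply that context is shown below; the route name 'author-detail' matches the reverse() call in catalog/models.py in the next row, while the class name, module placement, and template lookup path are assumptions for illustration only.

# Hedged sketch (not part of the dataset row): one way to back author_info.html.
from django.views import generic
from django.urls import path
from catalog.models import Author

class AuthorDetailView(generic.DetailView):
    model = Author
    template_name = 'author_info.html'  # assumes this template is on the configured template path

urlpatterns = [
    # 'author-detail' is the name the Author model reverses to
    path('author/<int:pk>/', AuthorDetailView.as_view(), name='author-detail'),
]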
from django.db import models from django.urls import reverse from django.contrib.auth.models import User import uuid from datetime import date class Author(models.Model): first_name = models.CharField(max_length=100) last_name = models.CharField(max_length=100) date_of_birth = models.DateField(null=True,blank=True) date_of_death = models.DateField('Died',null=True,blank=True) class Meta: ordering = ['last_name','first_name'] def get_absolute_url(self): return reverse('author-detail',args=[str(self.id)]) def __str__(self): return f'{self.first_name}, {self.last_name}' class Genre(models.Model): name = models.CharField(max_length=200,help_text="Enter a book genre (e.g. Science Fiction)") def __str__(self): return self.name class Book(models.Model): title = models.CharField(max_length=200) author = models.ForeignKey('Author',on_delete=models.SET_NULL, null=True) summary = models.TextField(max_length=1000,help_text='Enter a brief description of the book') isbn = models.CharField('ISBN',max_length=13,unique=True,help_text='13 Character <a href="https://isbn-international.org/content/what-isbn">ISBN Number</a>') genre = models.ManyToManyField(Genre,help_text='Select a genre for this book') def __str__(self): return self.title def get_absolute_url(self): return reverse('book-detail',args=[str(self.id)]) class BookInstance(models.Model): id = models.UUIDField(primary_key=True,default=uuid.uuid4,help_text='Unique ID for this particular book across the whole library') book = models.ForeignKey('Book',on_delete=models.RESTRICT,null=True) imprint = models.CharField(max_length=200,help_text='Imprint Date') due_back = models.DateField(null=True,blank=True) LOAN_STATUS = ( ('m', 'Maintenance'), ('o', 'On Loan'), ('a', 'Available'), ('r', 'Reserved'), ) status = models.CharField(max_length=1,choices=LOAN_STATUS,blank=True,default='m',help_text='Book Availability') borrower = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True) @property def is_overdue(self): if self.due_back and date.today() > self.due_back: return True return False class Meta: ordering = ['due_back'] permissions = (('can_mark_returned','Set book as returned'),) def __str__(self): return f'{self.id} ({self.book.title})'
Django-locallibrary/LocalLibrary/catalog/models.py/0
{ "file_path": "Django-locallibrary/LocalLibrary/catalog/models.py", "repo_id": "Django-locallibrary", "token_count": 981 }
1
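The models in catalog/models.py define the library's data: authors, books, genres, and per-copy BookInstance records with a single-character LOAN_STATUS and an is_overdue property. A short sketch of how they might be exercised from `python manage.py shell` follows; all field values below are made-up sample data, and it assumes the catalog app is installed and migrated.

# Hedged usage sketch for the models above (sample values, not dataset content).
from datetime import date, timedelta
from catalog.models import Author, Book, BookInstance

author = Author.objects.create(first_name='Jane', last_name='Doe')
book = Book.objects.create(title='Example Book', author=author,
                           summary='A short summary', isbn='1234567890123')
copy = BookInstance.objects.create(book=book, imprint='First edition',
                                   status='o',  # 'o' = On Loan per LOAN_STATUS
                                   due_back=date.today() - timedelta(days=3))

print(copy.is_overdue)  # True: due_back is in the past
overdue_loans = BookInstance.objects.filter(status__exact='o',
                                            due_back__lt=date.today())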
/* CHANGELISTS */ #changelist { display: flex; align-items: flex-start; justify-content: space-between; } #changelist .changelist-form-container { flex: 1 1 auto; min-width: 0; } #changelist table { width: 100%; } .change-list .hiddenfields { display:none; } .change-list .filtered table { border-right: none; } .change-list .filtered { min-height: 400px; } .change-list .filtered .results, .change-list .filtered .paginator, .filtered #toolbar, .filtered div.xfull { width: auto; } .change-list .filtered table tbody th { padding-right: 1em; } #changelist-form .results { overflow-x: auto; width: 100%; } #changelist .toplinks { border-bottom: 1px solid var(--hairline-color); } #changelist .paginator { color: var(--body-quiet-color); border-bottom: 1px solid var(--hairline-color); background: var(--body-bg); overflow: hidden; } /* CHANGELIST TABLES */ #changelist table thead th { padding: 0; white-space: nowrap; vertical-align: middle; } #changelist table thead th.action-checkbox-column { width: 1.5em; text-align: center; } #changelist table tbody td.action-checkbox { text-align: center; } #changelist table tfoot { color: var(--body-quiet-color); } /* TOOLBAR */ #toolbar { padding: 8px 10px; margin-bottom: 15px; border-top: 1px solid var(--hairline-color); border-bottom: 1px solid var(--hairline-color); background: var(--darkened-bg); color: var(--body-quiet-color); } #toolbar form input { border-radius: 4px; font-size: 14px; padding: 5px; color: var(--body-fg); } #toolbar #searchbar { height: 19px; border: 1px solid var(--border-color); padding: 2px 5px; margin: 0; vertical-align: top; font-size: 13px; max-width: 100%; } #toolbar #searchbar:focus { border-color: var(--body-quiet-color); } #toolbar form input[type="submit"] { border: 1px solid var(--border-color); font-size: 13px; padding: 4px 8px; margin: 0; vertical-align: middle; background: var(--body-bg); box-shadow: 0 -15px 20px -10px rgba(0, 0, 0, 0.15) inset; cursor: pointer; color: var(--body-fg); } #toolbar form input[type="submit"]:focus, #toolbar form input[type="submit"]:hover { border-color: var(--body-quiet-color); } #changelist-search img { vertical-align: middle; margin-right: 4px; } /* FILTER COLUMN */ #changelist-filter { flex: 0 0 240px; order: 1; background: var(--darkened-bg); border-left: none; margin: 0 0 0 30px; } #changelist-filter h2 { font-size: 14px; text-transform: uppercase; letter-spacing: 0.5px; padding: 5px 15px; margin-bottom: 12px; border-bottom: none; } #changelist-filter h3 { font-weight: 400; padding: 0 15px; margin-bottom: 10px; } #changelist-filter ul { margin: 5px 0; padding: 0 15px 15px; border-bottom: 1px solid var(--hairline-color); } #changelist-filter ul:last-child { border-bottom: none; } #changelist-filter li { list-style-type: none; margin-left: 0; padding-left: 0; } #changelist-filter a { display: block; color: var(--body-quiet-color); text-overflow: ellipsis; overflow-x: hidden; } #changelist-filter li.selected { border-left: 5px solid var(--hairline-color); padding-left: 10px; margin-left: -15px; } #changelist-filter li.selected a { color: var(--link-selected-fg); } #changelist-filter a:focus, #changelist-filter a:hover, #changelist-filter li.selected a:focus, #changelist-filter li.selected a:hover { color: var(--link-hover-color); } #changelist-filter #changelist-filter-clear a { font-size: 13px; padding-bottom: 10px; border-bottom: 1px solid var(--hairline-color); } /* DATE DRILLDOWN */ .change-list ul.toplinks { display: block; float: left; padding: 0; margin: 0; width: 100%; } .change-list 
ul.toplinks li { padding: 3px 6px; font-weight: bold; list-style-type: none; display: inline-block; } .change-list ul.toplinks .date-back a { color: var(--body-quiet-color); } .change-list ul.toplinks .date-back a:focus, .change-list ul.toplinks .date-back a:hover { color: var(--link-hover-color); } /* PAGINATOR */ .paginator { font-size: 13px; padding-top: 10px; padding-bottom: 10px; line-height: 22px; margin: 0; border-top: 1px solid var(--hairline-color); width: 100%; } .paginator a:link, .paginator a:visited { padding: 2px 6px; background: var(--button-bg); text-decoration: none; color: var(--button-fg); } .paginator a.showall { border: none; background: none; color: var(--link-fg); } .paginator a.showall:focus, .paginator a.showall:hover { background: none; color: var(--link-hover-color); } .paginator .end { margin-right: 6px; } .paginator .this-page { padding: 2px 6px; font-weight: bold; font-size: 13px; vertical-align: top; } .paginator a:focus, .paginator a:hover { color: white; background: var(--link-hover-color); } /* ACTIONS */ .filtered .actions { border-right: none; } #changelist table input { margin: 0; vertical-align: baseline; } #changelist table tbody tr.selected { background-color: var(--selected-row); } #changelist .actions { padding: 10px; background: var(--body-bg); border-top: none; border-bottom: none; line-height: 24px; color: var(--body-quiet-color); width: 100%; } #changelist .actions.selected { /* XXX Probably unused? */ background: var(--body-bg); border-top: 1px solid var(--body-bg); border-bottom: 1px solid #edecd6; } #changelist .actions span.all, #changelist .actions span.action-counter, #changelist .actions span.clear, #changelist .actions span.question { font-size: 13px; margin: 0 0.5em; } #changelist .actions:last-child { border-bottom: none; } #changelist .actions select { vertical-align: top; height: 24px; color: var(--body-fg); border: 1px solid var(--border-color); border-radius: 4px; font-size: 14px; padding: 0 0 0 4px; margin: 0; margin-left: 10px; } #changelist .actions select:focus { border-color: var(--body-quiet-color); } #changelist .actions label { display: inline-block; vertical-align: middle; font-size: 13px; } #changelist .actions .button { font-size: 13px; border: 1px solid var(--border-color); border-radius: 4px; background: var(--body-bg); box-shadow: 0 -15px 20px -10px rgba(0, 0, 0, 0.15) inset; cursor: pointer; height: 24px; line-height: 1; padding: 4px 8px; margin: 0; color: var(--body-fg); } #changelist .actions .button:focus, #changelist .actions .button:hover { border-color: var(--body-quiet-color); }
Django-locallibrary/LocalLibrary/staticfiles/admin/css/changelists.c70d77c47e69.css/0
{ "file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/css/changelists.c70d77c47e69.css", "repo_id": "Django-locallibrary", "token_count": 2912 }
2
/* LOGIN FORM */ .login { background: var(--darkened-bg); height: auto; } .login #header { height: auto; padding: 15px 16px; justify-content: center; } .login #header h1 { font-size: 18px; } .login #header h1 a { color: var(--header-link-color); } .login #content { padding: 20px 20px 0; } .login #container { background: var(--body-bg); border: 1px solid var(--hairline-color); border-radius: 4px; overflow: hidden; width: 28em; min-width: 300px; margin: 100px auto; height: auto; } .login .form-row { padding: 4px 0; } .login .form-row label { display: block; line-height: 2em; } .login .form-row #id_username, .login .form-row #id_password { padding: 8px; width: 100%; box-sizing: border-box; } .login .submit-row { padding: 1em 0 0 0; margin: 0; text-align: center; } .login .password-reset-link { text-align: center; }
Django-locallibrary/LocalLibrary/staticfiles/admin/css/login.c35adf41bb6e.css/0
{ "file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/css/login.c35adf41bb6e.css", "repo_id": "Django-locallibrary", "token_count": 411 }
3
/* GLOBAL */ th { text-align: right; } .module h2, .module caption { text-align: right; } .module ul, .module ol { margin-left: 0; margin-right: 1.5em; } .viewlink, .addlink, .changelink { padding-left: 0; padding-right: 16px; background-position: 100% 1px; } .deletelink { padding-left: 0; padding-right: 16px; background-position: 100% 1px; } .object-tools { float: left; } thead th:first-child, tfoot td:first-child { border-left: none; } /* LAYOUT */ #user-tools { right: auto; left: 0; text-align: left; } div.breadcrumbs { text-align: right; } #content-main { float: right; } #content-related { float: left; margin-left: -300px; margin-right: auto; } .colMS { margin-left: 300px; margin-right: 0; } /* SORTABLE TABLES */ table thead th.sorted .sortoptions { float: left; } thead th.sorted .text { padding-right: 0; padding-left: 42px; } /* dashboard styles */ .dashboard .module table td a { padding-left: .6em; padding-right: 16px; } /* changelists styles */ .change-list .filtered table { border-left: none; border-right: 0px none; } #changelist-filter { border-left: none; border-right: none; margin-left: 0; margin-right: 30px; } #changelist-filter li.selected { border-left: none; padding-left: 10px; margin-left: 0; border-right: 5px solid var(--hairline-color); padding-right: 10px; margin-right: -15px; } #changelist table tbody td:first-child, #changelist table tbody th:first-child { border-right: none; border-left: none; } /* FORMS */ .aligned label { padding: 0 0 3px 1em; float: right; } .submit-row { text-align: left } .submit-row p.deletelink-box { float: right; } .submit-row input.default { margin-left: 0; } .vDateField, .vTimeField { margin-left: 2px; } .aligned .form-row input { margin-left: 5px; } form .aligned p.help, form .aligned div.help { clear: right; } form .aligned ul { margin-right: 163px; margin-left: 0; } form ul.inline li { float: right; padding-right: 0; padding-left: 7px; } input[type=submit].default, .submit-row input.default { float: left; } fieldset .fieldBox { float: right; margin-left: 20px; margin-right: 0; } .errorlist li { background-position: 100% 12px; padding: 0; } .errornote { background-position: 100% 12px; padding: 10px 12px; } /* WIDGETS */ .calendarnav-previous { top: 0; left: auto; right: 10px; } .calendarnav-next { top: 0; right: auto; left: 10px; } .calendar caption, .calendarbox h2 { text-align: center; } .selector { float: right; } .selector .selector-filter { text-align: right; } .inline-deletelink { float: left; } form .form-row p.datetime { overflow: hidden; } .related-widget-wrapper { float: right; } /* MISC */ .inline-related h2, .inline-group h2 { text-align: right } .inline-related h3 span.delete { padding-right: 20px; padding-left: inherit; left: 10px; right: inherit; float:left; } .inline-related h3 span.delete label { margin-left: inherit; margin-right: 2px; }
Django-locallibrary/LocalLibrary/staticfiles/admin/css/rtl.4bc23eb90919.css/0
{ "file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/css/rtl.4bc23eb90919.css", "repo_id": "Django-locallibrary", "token_count": 1421 }
4
/* SELECTOR (FILTER INTERFACE) */ .selector { width: 800px; float: left; } .selector select { width: 380px; height: 17.2em; } .selector-available, .selector-chosen { float: left; width: 380px; text-align: center; margin-bottom: 5px; } .selector-chosen select { border-top: none; } .selector-available h2, .selector-chosen h2 { border: 1px solid var(--border-color); border-radius: 4px 4px 0 0; } .selector-chosen h2 { background: var(--primary); color: var(--header-link-color); } .selector .selector-available h2 { background: var(--darkened-bg); color: var(--body-quiet-color); } .selector .selector-filter { border: 1px solid var(--border-color); border-width: 0 1px; padding: 8px; color: var(--body-quiet-color); font-size: 10px; margin: 0; text-align: left; } .selector .selector-filter label, .inline-group .aligned .selector .selector-filter label { float: left; margin: 7px 0 0; width: 18px; height: 18px; padding: 0; overflow: hidden; line-height: 1; } .selector .selector-available input { width: 320px; margin-left: 8px; } .selector ul.selector-chooser { float: left; width: 22px; background-color: var(--selected-bg); border-radius: 10px; margin: 10em 5px 0 5px; padding: 0; } .selector-chooser li { margin: 0; padding: 3px; list-style-type: none; } .selector select { padding: 0 10px; margin: 0 0 10px; border-radius: 0 0 4px 4px; } .selector-add, .selector-remove { width: 16px; height: 16px; display: block; text-indent: -3000px; overflow: hidden; cursor: default; opacity: 0.55; } .active.selector-add, .active.selector-remove { opacity: 1; } .active.selector-add:hover, .active.selector-remove:hover { cursor: pointer; } .selector-add { background: url("../img/selector-icons.b4555096cea2.svg") 0 -96px no-repeat; } .active.selector-add:focus, .active.selector-add:hover { background-position: 0 -112px; } .selector-remove { background: url("../img/selector-icons.b4555096cea2.svg") 0 -64px no-repeat; } .active.selector-remove:focus, .active.selector-remove:hover { background-position: 0 -80px; } a.selector-chooseall, a.selector-clearall { display: inline-block; height: 16px; text-align: left; margin: 1px auto 3px; overflow: hidden; font-weight: bold; line-height: 16px; color: var(--body-quiet-color); text-decoration: none; opacity: 0.55; } a.active.selector-chooseall:focus, a.active.selector-clearall:focus, a.active.selector-chooseall:hover, a.active.selector-clearall:hover { color: var(--link-fg); } a.active.selector-chooseall, a.active.selector-clearall { opacity: 1; } a.active.selector-chooseall:hover, a.active.selector-clearall:hover { cursor: pointer; } a.selector-chooseall { padding: 0 18px 0 0; background: url("../img/selector-icons.b4555096cea2.svg") right -160px no-repeat; cursor: default; } a.active.selector-chooseall:focus, a.active.selector-chooseall:hover { background-position: 100% -176px; } a.selector-clearall { padding: 0 0 0 18px; background: url("../img/selector-icons.b4555096cea2.svg") 0 -128px no-repeat; cursor: default; } a.active.selector-clearall:focus, a.active.selector-clearall:hover { background-position: 0 -144px; } /* STACKED SELECTORS */ .stacked { float: left; width: 490px; } .stacked select { width: 480px; height: 10.1em; } .stacked .selector-available, .stacked .selector-chosen { width: 480px; } .stacked .selector-available { margin-bottom: 0; } .stacked .selector-available input { width: 422px; } .stacked ul.selector-chooser { height: 22px; width: 50px; margin: 0 0 10px 40%; background-color: #eee; border-radius: 10px; } .stacked .selector-chooser li { float: left; 
padding: 3px 3px 3px 5px; } .stacked .selector-chooseall, .stacked .selector-clearall { display: none; } .stacked .selector-add { background: url("../img/selector-icons.b4555096cea2.svg") 0 -32px no-repeat; cursor: default; } .stacked .active.selector-add { background-position: 0 -32px; cursor: pointer; } .stacked .active.selector-add:focus, .stacked .active.selector-add:hover { background-position: 0 -48px; cursor: pointer; } .stacked .selector-remove { background: url("../img/selector-icons.b4555096cea2.svg") 0 0 no-repeat; cursor: default; } .stacked .active.selector-remove { background-position: 0 0px; cursor: pointer; } .stacked .active.selector-remove:focus, .stacked .active.selector-remove:hover { background-position: 0 -16px; cursor: pointer; } .selector .help-icon { background: url("../img/icon-unknown.a18cb4398978.svg") 0 0 no-repeat; display: inline-block; vertical-align: middle; margin: -2px 0 0 2px; width: 13px; height: 13px; } .selector .selector-chosen .help-icon { background: url("../img/icon-unknown-alt.81536e128bb6.svg") 0 0 no-repeat; } .selector .search-label-icon { background: url("../img/search.7cf54ff789c6.svg") 0 0 no-repeat; display: inline-block; height: 18px; width: 18px; } /* DATE AND TIME */ p.datetime { line-height: 20px; margin: 0; padding: 0; color: var(--body-quiet-color); font-weight: bold; } .datetime span { white-space: nowrap; font-weight: normal; font-size: 11px; color: var(--body-quiet-color); } .datetime input, .form-row .datetime input.vDateField, .form-row .datetime input.vTimeField { margin-left: 5px; margin-bottom: 4px; } table p.datetime { font-size: 11px; margin-left: 0; padding-left: 0; } .datetimeshortcuts .clock-icon, .datetimeshortcuts .date-icon { position: relative; display: inline-block; vertical-align: middle; height: 16px; width: 16px; overflow: hidden; } .datetimeshortcuts .clock-icon { background: url("../img/icon-clock.e1d4dfac3f2b.svg") 0 0 no-repeat; } .datetimeshortcuts a:focus .clock-icon, .datetimeshortcuts a:hover .clock-icon { background-position: 0 -16px; } .datetimeshortcuts .date-icon { background: url("../img/icon-calendar.ac7aea671bea.svg") 0 0 no-repeat; top: -1px; } .datetimeshortcuts a:focus .date-icon, .datetimeshortcuts a:hover .date-icon { background-position: 0 -16px; } .timezonewarning { font-size: 11px; color: var(--body-quiet-color); } /* URL */ p.url { line-height: 20px; margin: 0; padding: 0; color: var(--body-quiet-color); font-size: 11px; font-weight: bold; } .url a { font-weight: normal; } /* FILE UPLOADS */ p.file-upload { line-height: 20px; margin: 0; padding: 0; color: var(--body-quiet-color); font-size: 11px; font-weight: bold; } .aligned p.file-upload { margin-left: 170px; } .file-upload a { font-weight: normal; } .file-upload .deletelink { margin-left: 5px; } span.clearable-file-input label { color: var(--body-fg); font-size: 11px; display: inline; float: none; } /* CALENDARS & CLOCKS */ .calendarbox, .clockbox { margin: 5px auto; font-size: 12px; width: 19em; text-align: center; background: var(--body-bg); color: var(--body-fg); border: 1px solid var(--hairline-color); border-radius: 4px; box-shadow: 0 2px 4px rgba(0, 0, 0, 0.15); overflow: hidden; position: relative; } .clockbox { width: auto; } .calendar { margin: 0; padding: 0; } .calendar table { margin: 0; padding: 0; border-collapse: collapse; background: white; width: 100%; } .calendar caption, .calendarbox h2 { margin: 0; text-align: center; border-top: none; font-weight: 700; font-size: 12px; color: #333; background: var(--accent); } 
.calendar th { padding: 8px 5px; background: var(--darkened-bg); border-bottom: 1px solid var(--border-color); font-weight: 400; font-size: 12px; text-align: center; color: var(--body-quiet-color); } .calendar td { font-weight: 400; font-size: 12px; text-align: center; padding: 0; border-top: 1px solid var(--hairline-color); border-bottom: none; } .calendar td.selected a { background: var(--primary); color: var(--button-fg); } .calendar td.nonday { background: var(--darkened-bg); } .calendar td.today a { font-weight: 700; } .calendar td a, .timelist a { display: block; font-weight: 400; padding: 6px; text-decoration: none; color: var(--body-quiet-color); } .calendar td a:focus, .timelist a:focus, .calendar td a:hover, .timelist a:hover { background: var(--primary); color: white; } .calendar td a:active, .timelist a:active { background: var(--header-bg); color: white; } .calendarnav { font-size: 10px; text-align: center; color: #ccc; margin: 0; padding: 1px 3px; } .calendarnav a:link, #calendarnav a:visited, #calendarnav a:focus, #calendarnav a:hover { color: var(--body-quiet-color); } .calendar-shortcuts { background: var(--body-bg); color: var(--body-quiet-color); font-size: 11px; line-height: 11px; border-top: 1px solid var(--hairline-color); padding: 8px 0; } .calendarbox .calendarnav-previous, .calendarbox .calendarnav-next { display: block; position: absolute; top: 8px; width: 15px; height: 15px; text-indent: -9999px; padding: 0; } .calendarnav-previous { left: 10px; background: url("../img/calendar-icons.39b290681a8b.svg") 0 0 no-repeat; } .calendarbox .calendarnav-previous:focus, .calendarbox .calendarnav-previous:hover { background-position: 0 -15px; } .calendarnav-next { right: 10px; background: url("../img/calendar-icons.39b290681a8b.svg") 0 -30px no-repeat; } .calendarbox .calendarnav-next:focus, .calendarbox .calendarnav-next:hover { background-position: 0 -45px; } .calendar-cancel { margin: 0; padding: 4px 0; font-size: 12px; background: #eee; border-top: 1px solid var(--border-color); color: var(--body-fg); } .calendar-cancel:focus, .calendar-cancel:hover { background: #ddd; } .calendar-cancel a { color: black; display: block; } ul.timelist, .timelist li { list-style-type: none; margin: 0; padding: 0; } .timelist a { padding: 2px; } /* EDIT INLINE */ .inline-deletelink { float: right; text-indent: -9999px; background: url("../img/inline-delete.fec1b761f254.svg") 0 0 no-repeat; width: 16px; height: 16px; border: 0px none; } .inline-deletelink:focus, .inline-deletelink:hover { cursor: pointer; } /* RELATED WIDGET WRAPPER */ .related-widget-wrapper { float: left; /* display properly in form rows with multiple fields */ overflow: hidden; /* clear floated contents */ } .related-widget-wrapper-link { opacity: 0.3; } .related-widget-wrapper-link:link { opacity: .8; } .related-widget-wrapper-link:link:focus, .related-widget-wrapper-link:link:hover { opacity: 1; } select + .related-widget-wrapper-link, .related-widget-wrapper-link + .related-widget-wrapper-link { margin-left: 7px; }
Django-locallibrary/LocalLibrary/staticfiles/admin/css/widgets.694d845b2cb1.css/0
{ "file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/css/widgets.694d845b2cb1.css", "repo_id": "Django-locallibrary", "token_count": 4861 }
5
{% extends "base_template.html" %} {% block content %} {% if form.errors %} <p>Your username and password didn't match. Please try again.</p> {% endif %} {% if next %} {% if user.is_authenticated %} <p>Your account doesn't have access to this page. To proceed, please login with an account that has access.</p> {% else %} <p>Please login to see this page.</p> {% endif %} {% endif %} <form method="post" action="{% url 'login' %}"> {% csrf_token %} <table> <tr> <td>{{ form.username.label_tag }}</td> <td>{{ form.username }}</td> </tr> <tr> <td>{{ form.password.label_tag }}</td> <td>{{ form.password }}</td> </tr> </table> <input type="submit" value="login" /> <input type="hidden" name="next" value="{{ next }}" /> </form> {# Assumes you setup the password_reset view in your URLconf #} <p><a href="{% url 'password_reset' %}">Lost password?</a></p> {% endblock %}
Django-locallibrary/LocalLibrary/templates/registration/login.html/0
{ "file_path": "Django-locallibrary/LocalLibrary/templates/registration/login.html", "repo_id": "Django-locallibrary", "token_count": 518 }
6
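This login template posts to the 'login' route and links to 'password_reset', both of which are names registered by Django's built-in auth views; placing it at templates/registration/login.html matches the default template path those views look up. A sketch of the URLconf wiring they rely on is below; the 'accounts/' prefix is a conventional choice, not something the row itself specifies.

# Sketch of the project URLconf entries the login template depends on.
from django.urls import include, path

urlpatterns = [
    # Registers the 'login', 'logout', 'password_reset', etc. route names.
    path('accounts/', include('django.contrib.auth.urls')),
]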
from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import List, Optional __version__ = "20.2.3" def main(args=None): # type: (Optional[List[str]]) -> int """This is an internal API only meant for use by pip's own console scripts. For additional details, see https://github.com/pypa/pip/issues/7498. """ from pip._internal.utils.entrypoints import _wrapper return _wrapper(args)
Django-locallibrary/env/Lib/site-packages/pip/__init__.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/__init__.py", "repo_id": "Django-locallibrary", "token_count": 161 }
7
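pip's __init__.py exposes main() as an internal-only entry point for its console scripts. For driving pip from other code, the commonly recommended approach is to run it as a subprocess of the current interpreter; a brief sketch, with 'requests' as an arbitrary example package:

# Hedged sketch: invoking pip without touching its internal main().
import subprocess
import sys

subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests'])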
"""Cache Management """ import hashlib import json import logging import os from pip._vendor.packaging.tags import interpreter_name, interpreter_version from pip._vendor.packaging.utils import canonicalize_name from pip._internal.exceptions import InvalidWheelFilename from pip._internal.models.link import Link from pip._internal.models.wheel import Wheel from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.urls import path_to_url if MYPY_CHECK_RUNNING: from typing import Optional, Set, List, Any, Dict from pip._vendor.packaging.tags import Tag from pip._internal.models.format_control import FormatControl logger = logging.getLogger(__name__) def _hash_dict(d): # type: (Dict[str, str]) -> str """Return a stable sha224 of a dictionary.""" s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) return hashlib.sha224(s.encode("ascii")).hexdigest() class Cache(object): """An abstract class - provides cache directories for data from links :param cache_dir: The root of the cache. :param format_control: An object of FormatControl class to limit binaries being read from the cache. :param allowed_formats: which formats of files the cache should store. ('binary' and 'source' are the only allowed values) """ def __init__(self, cache_dir, format_control, allowed_formats): # type: (str, FormatControl, Set[str]) -> None super(Cache, self).__init__() assert not cache_dir or os.path.isabs(cache_dir) self.cache_dir = cache_dir or None self.format_control = format_control self.allowed_formats = allowed_formats _valid_formats = {"source", "binary"} assert self.allowed_formats.union(_valid_formats) == _valid_formats def _get_cache_path_parts_legacy(self, link): # type: (Link) -> List[str] """Get parts of part that must be os.path.joined with cache_dir Legacy cache key (pip < 20) for compatibility with older caches. """ # We want to generate an url to use as our cache key, we don't want to # just re-use the URL because it might have other items in the fragment # and we don't care about those. key_parts = [link.url_without_fragment] if link.hash_name is not None and link.hash is not None: key_parts.append("=".join([link.hash_name, link.hash])) key_url = "#".join(key_parts) # Encode our key url with sha224, we'll use this because it has similar # security properties to sha256, but with a shorter total output (and # thus less secure). However the differences don't make a lot of # difference for our use case here. hashed = hashlib.sha224(key_url.encode()).hexdigest() # We want to nest the directories some to prevent having a ton of top # level directories where we might run out of sub directories on some # FS. parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] return parts def _get_cache_path_parts(self, link): # type: (Link) -> List[str] """Get parts of part that must be os.path.joined with cache_dir """ # We want to generate an url to use as our cache key, we don't want to # just re-use the URL because it might have other items in the fragment # and we don't care about those. 
key_parts = {"url": link.url_without_fragment} if link.hash_name is not None and link.hash is not None: key_parts[link.hash_name] = link.hash if link.subdirectory_fragment: key_parts["subdirectory"] = link.subdirectory_fragment # Include interpreter name, major and minor version in cache key # to cope with ill-behaved sdists that build a different wheel # depending on the python version their setup.py is being run on, # and don't encode the difference in compatibility tags. # https://github.com/pypa/pip/issues/7296 key_parts["interpreter_name"] = interpreter_name() key_parts["interpreter_version"] = interpreter_version() # Encode our key url with sha224, we'll use this because it has similar # security properties to sha256, but with a shorter total output (and # thus less secure). However the differences don't make a lot of # difference for our use case here. hashed = _hash_dict(key_parts) # We want to nest the directories some to prevent having a ton of top # level directories where we might run out of sub directories on some # FS. parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] return parts def _get_candidates(self, link, canonical_package_name): # type: (Link, str) -> List[Any] can_not_cache = ( not self.cache_dir or not canonical_package_name or not link ) if can_not_cache: return [] formats = self.format_control.get_allowed_formats( canonical_package_name ) if not self.allowed_formats.intersection(formats): return [] candidates = [] path = self.get_path_for_link(link) if os.path.isdir(path): for candidate in os.listdir(path): candidates.append((candidate, path)) # TODO remove legacy path lookup in pip>=21 legacy_path = self.get_path_for_link_legacy(link) if os.path.isdir(legacy_path): for candidate in os.listdir(legacy_path): candidates.append((candidate, legacy_path)) return candidates def get_path_for_link_legacy(self, link): # type: (Link) -> str raise NotImplementedError() def get_path_for_link(self, link): # type: (Link) -> str """Return a directory to store cached items in for link. """ raise NotImplementedError() def get( self, link, # type: Link package_name, # type: Optional[str] supported_tags, # type: List[Tag] ): # type: (...) -> Link """Returns a link to a cached item if it exists, otherwise returns the passed link. """ raise NotImplementedError() class SimpleWheelCache(Cache): """A cache of wheels for future installs. """ def __init__(self, cache_dir, format_control): # type: (str, FormatControl) -> None super(SimpleWheelCache, self).__init__( cache_dir, format_control, {"binary"} ) def get_path_for_link_legacy(self, link): # type: (Link) -> str parts = self._get_cache_path_parts_legacy(link) assert self.cache_dir return os.path.join(self.cache_dir, "wheels", *parts) def get_path_for_link(self, link): # type: (Link) -> str """Return a directory to store cached wheels for link Because there are M wheels for any one sdist, we provide a directory to cache them in, and then consult that directory when looking up cache hits. We only insert things into the cache if they have plausible version numbers, so that we don't contaminate the cache with things that were not unique. E.g. ./package might have dozens of installs done for it and build a version of 0.0...and if we built and cached a wheel, we'd end up using the same wheel even if the source has been edited. :param link: The link of the sdist for which this will cache wheels. 
""" parts = self._get_cache_path_parts(link) assert self.cache_dir # Store wheels within the root cache_dir return os.path.join(self.cache_dir, "wheels", *parts) def get( self, link, # type: Link package_name, # type: Optional[str] supported_tags, # type: List[Tag] ): # type: (...) -> Link candidates = [] if not package_name: return link canonical_package_name = canonicalize_name(package_name) for wheel_name, wheel_dir in self._get_candidates( link, canonical_package_name ): try: wheel = Wheel(wheel_name) except InvalidWheelFilename: continue if canonicalize_name(wheel.name) != canonical_package_name: logger.debug( "Ignoring cached wheel %s for %s as it " "does not match the expected distribution name %s.", wheel_name, link, package_name, ) continue if not wheel.supported(supported_tags): # Built for a different python/arch/etc continue candidates.append( ( wheel.support_index_min(supported_tags), wheel_name, wheel_dir, ) ) if not candidates: return link _, wheel_name, wheel_dir = min(candidates) return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) class EphemWheelCache(SimpleWheelCache): """A SimpleWheelCache that creates it's own temporary cache directory """ def __init__(self, format_control): # type: (FormatControl) -> None self._temp_dir = TempDirectory( kind=tempdir_kinds.EPHEM_WHEEL_CACHE, globally_managed=True, ) super(EphemWheelCache, self).__init__( self._temp_dir.path, format_control ) class CacheEntry(object): def __init__( self, link, # type: Link persistent, # type: bool ): self.link = link self.persistent = persistent class WheelCache(Cache): """Wraps EphemWheelCache and SimpleWheelCache into a single Cache This Cache allows for gracefully degradation, using the ephem wheel cache when a certain link is not found in the simple wheel cache first. """ def __init__(self, cache_dir, format_control): # type: (str, FormatControl) -> None super(WheelCache, self).__init__( cache_dir, format_control, {'binary'} ) self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) def get_path_for_link_legacy(self, link): # type: (Link) -> str return self._wheel_cache.get_path_for_link_legacy(link) def get_path_for_link(self, link): # type: (Link) -> str return self._wheel_cache.get_path_for_link(link) def get_ephem_path_for_link(self, link): # type: (Link) -> str return self._ephem_cache.get_path_for_link(link) def get( self, link, # type: Link package_name, # type: Optional[str] supported_tags, # type: List[Tag] ): # type: (...) -> Link cache_entry = self.get_cache_entry(link, package_name, supported_tags) if cache_entry is None: return link return cache_entry.link def get_cache_entry( self, link, # type: Link package_name, # type: Optional[str] supported_tags, # type: List[Tag] ): # type: (...) -> Optional[CacheEntry] """Returns a CacheEntry with a link to a cached item if it exists or None. The cache entry indicates if the item was found in the persistent or ephemeral cache. """ retval = self._wheel_cache.get( link=link, package_name=package_name, supported_tags=supported_tags, ) if retval is not link: return CacheEntry(retval, persistent=True) retval = self._ephem_cache.get( link=link, package_name=package_name, supported_tags=supported_tags, ) if retval is not link: return CacheEntry(retval, persistent=False) return None
Django-locallibrary/env/Lib/site-packages/pip/_internal/cache.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/cache.py", "repo_id": "Django-locallibrary", "token_count": 5131 }
8
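cache.py builds its wheel-cache paths by hashing a stable JSON encoding of the link URL, hash, subdirectory, and interpreter details with sha224, then nesting directories by hash prefix so no single directory grows too large. A standalone sketch of that key-to-path scheme, using made-up example values and an assumed cache root:

# Mirror of the cache-key scheme described in _hash_dict / _get_cache_path_parts.
import hashlib
import json
import os

key_parts = {"url": "https://example.com/pkg-1.0.tar.gz",  # example values only
             "interpreter_name": "cp", "interpreter_version": "38"}
s = json.dumps(key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
hashed = hashlib.sha224(s.encode("ascii")).hexdigest()
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
print(os.path.join("~/.cache/pip/wheels", *parts))  # assumed cache root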
""" shared options and groups The principle here is to define options once, but *not* instantiate them globally. One reason being that options with action='append' can carry state between parses. pip parses general options twice internally, and shouldn't pass on state. To be consistent, all options will follow this design. """ # The following comment should be removed at some point in the future. # mypy: strict-optional=False from __future__ import absolute_import import os import textwrap import warnings from distutils.util import strtobool from functools import partial from optparse import SUPPRESS_HELP, Option, OptionGroup from textwrap import dedent from pip._internal.cli.progress_bars import BAR_TYPES from pip._internal.exceptions import CommandError from pip._internal.locations import USER_CACHE_DIR, get_src_prefix from pip._internal.models.format_control import FormatControl from pip._internal.models.index import PyPI from pip._internal.models.target_python import TargetPython from pip._internal.utils.hashes import STRONG_HASHES from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import Any, Callable, Dict, Optional, Tuple from optparse import OptionParser, Values from pip._internal.cli.parser import ConfigOptionParser def raise_option_error(parser, option, msg): # type: (OptionParser, Option, str) -> None """ Raise an option parsing error using parser.error(). Args: parser: an OptionParser instance. option: an Option instance. msg: the error text. """ msg = '{} error: {}'.format(option, msg) msg = textwrap.fill(' '.join(msg.split())) parser.error(msg) def make_option_group(group, parser): # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup """ Return an OptionGroup object group -- assumed to be dict with 'name' and 'options' keys parser -- an optparse Parser """ option_group = OptionGroup(parser, group['name']) for option in group['options']: option_group.add_option(option()) return option_group def check_install_build_global(options, check_options=None): # type: (Values, Optional[Values]) -> None """Disable wheels if per-setup.py call options are set. :param options: The OptionParser options to update. :param check_options: The options to check, if not supplied defaults to options. """ if check_options is None: check_options = options def getname(n): # type: (str) -> Optional[Any] return getattr(check_options, n, None) names = ["build_options", "global_options", "install_options"] if any(map(getname, names)): control = options.format_control control.disallow_binaries() warnings.warn( 'Disabling all use of wheels due to the use of --build-option ' '/ --global-option / --install-option.', stacklevel=2, ) def check_dist_restriction(options, check_target=False): # type: (Values, bool) -> None """Function for determining if custom platform options are allowed. :param options: The OptionParser options. :param check_target: Whether or not to check if --target is being used. """ dist_restriction_set = any([ options.python_version, options.platform, options.abi, options.implementation, ]) binary_only = FormatControl(set(), {':all:'}) sdist_dependencies_allowed = ( options.format_control != binary_only and not options.ignore_dependencies ) # Installations or downloads using dist restrictions must not combine # source distributions and dist-specific wheels, as they are not # guaranteed to be locally compatible. 
if dist_restriction_set and sdist_dependencies_allowed: raise CommandError( "When restricting platform and interpreter constraints using " "--python-version, --platform, --abi, or --implementation, " "either --no-deps must be set, or --only-binary=:all: must be " "set and --no-binary must not be set (or must be set to " ":none:)." ) if check_target: if dist_restriction_set and not options.target_dir: raise CommandError( "Can not use any platform or abi specific options unless " "installing via '--target'" ) def _path_option_check(option, opt, value): # type: (Option, str, str) -> str return os.path.expanduser(value) class PipOption(Option): TYPES = Option.TYPES + ("path",) TYPE_CHECKER = Option.TYPE_CHECKER.copy() TYPE_CHECKER["path"] = _path_option_check ########### # options # ########### help_ = partial( Option, '-h', '--help', dest='help', action='help', help='Show help.', ) # type: Callable[..., Option] isolated_mode = partial( Option, "--isolated", dest="isolated_mode", action="store_true", default=False, help=( "Run pip in an isolated mode, ignoring environment variables and user " "configuration." ), ) # type: Callable[..., Option] require_virtualenv = partial( Option, # Run only if inside a virtualenv, bail if not. '--require-virtualenv', '--require-venv', dest='require_venv', action='store_true', default=False, help=SUPPRESS_HELP ) # type: Callable[..., Option] verbose = partial( Option, '-v', '--verbose', dest='verbose', action='count', default=0, help='Give more output. Option is additive, and can be used up to 3 times.' ) # type: Callable[..., Option] no_color = partial( Option, '--no-color', dest='no_color', action='store_true', default=False, help="Suppress colored output", ) # type: Callable[..., Option] version = partial( Option, '-V', '--version', dest='version', action='store_true', help='Show version and exit.', ) # type: Callable[..., Option] quiet = partial( Option, '-q', '--quiet', dest='quiet', action='count', default=0, help=( 'Give less output. Option is additive, and can be used up to 3' ' times (corresponding to WARNING, ERROR, and CRITICAL logging' ' levels).' ), ) # type: Callable[..., Option] progress_bar = partial( Option, '--progress-bar', dest='progress_bar', type='choice', choices=list(BAR_TYPES.keys()), default='on', help=( 'Specify type of progress to be displayed [' + '|'.join(BAR_TYPES.keys()) + '] (default: %default)' ), ) # type: Callable[..., Option] log = partial( PipOption, "--log", "--log-file", "--local-log", dest="log", metavar="path", type="path", help="Path to a verbose appending log." ) # type: Callable[..., Option] no_input = partial( Option, # Don't ask for input '--no-input', dest='no_input', action='store_true', default=False, help="Disable prompting for input." ) # type: Callable[..., Option] proxy = partial( Option, '--proxy', dest='proxy', type='str', default='', help="Specify a proxy in the form [user:passwd@]proxy.server:port." 
) # type: Callable[..., Option] retries = partial( Option, '--retries', dest='retries', type='int', default=5, help="Maximum number of retries each connection should attempt " "(default %default times).", ) # type: Callable[..., Option] timeout = partial( Option, '--timeout', '--default-timeout', metavar='sec', dest='timeout', type='float', default=15, help='Set the socket timeout (default %default seconds).', ) # type: Callable[..., Option] def exists_action(): # type: () -> Option return Option( # Option when path already exist '--exists-action', dest='exists_action', type='choice', choices=['s', 'i', 'w', 'b', 'a'], default=[], action='append', metavar='action', help="Default action when a path already exists: " "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", ) cert = partial( PipOption, '--cert', dest='cert', type='path', metavar='path', help="Path to alternate CA bundle.", ) # type: Callable[..., Option] client_cert = partial( PipOption, '--client-cert', dest='client_cert', type='path', default=None, metavar='path', help="Path to SSL client certificate, a single file containing the " "private key and the certificate in PEM format.", ) # type: Callable[..., Option] index_url = partial( Option, '-i', '--index-url', '--pypi-url', dest='index_url', metavar='URL', default=PyPI.simple_url, help="Base URL of the Python Package Index (default %default). " "This should point to a repository compliant with PEP 503 " "(the simple repository API) or a local directory laid out " "in the same format.", ) # type: Callable[..., Option] def extra_index_url(): # type: () -> Option return Option( '--extra-index-url', dest='extra_index_urls', metavar='URL', action='append', default=[], help="Extra URLs of package indexes to use in addition to " "--index-url. Should follow the same rules as " "--index-url.", ) no_index = partial( Option, '--no-index', dest='no_index', action='store_true', default=False, help='Ignore package index (only looking at --find-links URLs instead).', ) # type: Callable[..., Option] def find_links(): # type: () -> Option return Option( '-f', '--find-links', dest='find_links', action='append', default=[], metavar='url', help="If a URL or path to an html file, then parse for links to " "archives such as sdist (.tar.gz) or wheel (.whl) files. " "If a local path or file:// URL that's a directory, " "then look for archives in the directory listing. " "Links to VCS project URLs are not supported.", ) def trusted_host(): # type: () -> Option return Option( "--trusted-host", dest="trusted_hosts", action="append", metavar="HOSTNAME", default=[], help="Mark this host or host:port pair as trusted, even though it " "does not have valid or any HTTPS.", ) def constraints(): # type: () -> Option return Option( '-c', '--constraint', dest='constraints', action='append', default=[], metavar='file', help='Constrain versions using the given constraints file. ' 'This option can be used multiple times.' ) def requirements(): # type: () -> Option return Option( '-r', '--requirement', dest='requirements', action='append', default=[], metavar='file', help='Install from the given requirements file. ' 'This option can be used multiple times.' ) def editable(): # type: () -> Option return Option( '-e', '--editable', dest='editables', action='append', default=[], metavar='path/url', help=('Install a project in editable mode (i.e. 
setuptools ' '"develop mode") from a local project path or a VCS url.'), ) def _handle_src(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None value = os.path.abspath(value) setattr(parser.values, option.dest, value) src = partial( PipOption, '--src', '--source', '--source-dir', '--source-directory', dest='src_dir', type='path', metavar='dir', default=get_src_prefix(), action='callback', callback=_handle_src, help='Directory to check out editable projects into. ' 'The default in a virtualenv is "<venv path>/src". ' 'The default for global installs is "<current dir>/src".' ) # type: Callable[..., Option] def _get_format_control(values, option): # type: (Values, Option) -> Any """Get a format_control object.""" return getattr(values, option.dest) def _handle_no_binary(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None existing = _get_format_control(parser.values, option) FormatControl.handle_mutual_excludes( value, existing.no_binary, existing.only_binary, ) def _handle_only_binary(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None existing = _get_format_control(parser.values, option) FormatControl.handle_mutual_excludes( value, existing.only_binary, existing.no_binary, ) def no_binary(): # type: () -> Option format_control = FormatControl(set(), set()) return Option( "--no-binary", dest="format_control", action="callback", callback=_handle_no_binary, type="str", default=format_control, help='Do not use binary packages. Can be supplied multiple times, and ' 'each time adds to the existing value. Accepts either ":all:" to ' 'disable all binary packages, ":none:" to empty the set (notice ' 'the colons), or one or more package names with commas between ' 'them (no colons). Note that some packages are tricky to compile ' 'and may fail to install when this option is used on them.', ) def only_binary(): # type: () -> Option format_control = FormatControl(set(), set()) return Option( "--only-binary", dest="format_control", action="callback", callback=_handle_only_binary, type="str", default=format_control, help='Do not use source packages. Can be supplied multiple times, and ' 'each time adds to the existing value. Accepts either ":all:" to ' 'disable all source packages, ":none:" to empty the set, or one ' 'or more package names with commas between them. Packages ' 'without binary distributions will fail to install when this ' 'option is used on them.', ) platform = partial( Option, '--platform', dest='platform', metavar='platform', default=None, help=("Only use wheels compatible with <platform>. " "Defaults to the platform of the running system."), ) # type: Callable[..., Option] # This was made a separate function for unit-testing purposes. def _convert_python_version(value): # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] """ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. :return: A 2-tuple (version_info, error_msg), where `error_msg` is non-None if and only if there was a parsing error. """ if not value: # The empty string is the same as not providing a value. return (None, None) parts = value.split('.') if len(parts) > 3: return ((), 'at most three version parts are allowed') if len(parts) == 1: # Then we are in the case of "3" or "37". 
value = parts[0] if len(value) > 1: parts = [value[0], value[1:]] try: version_info = tuple(int(part) for part in parts) except ValueError: return ((), 'each version part must be an integer') return (version_info, None) def _handle_python_version(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None """ Handle a provided --python-version value. """ version_info, error_msg = _convert_python_version(value) if error_msg is not None: msg = ( 'invalid --python-version value: {!r}: {}'.format( value, error_msg, ) ) raise_option_error(parser, option=option, msg=msg) parser.values.python_version = version_info python_version = partial( Option, '--python-version', dest='python_version', metavar='python_version', action='callback', callback=_handle_python_version, type='str', default=None, help=dedent("""\ The Python interpreter version to use for wheel and "Requires-Python" compatibility checks. Defaults to a version derived from the running interpreter. The version can be specified using up to three dot-separated integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor version can also be given as a string without dots (e.g. "37" for 3.7.0). """), ) # type: Callable[..., Option] implementation = partial( Option, '--implementation', dest='implementation', metavar='implementation', default=None, help=("Only use wheels compatible with Python " "implementation <implementation>, e.g. 'pp', 'jy', 'cp', " " or 'ip'. If not specified, then the current " "interpreter implementation is used. Use 'py' to force " "implementation-agnostic wheels."), ) # type: Callable[..., Option] abi = partial( Option, '--abi', dest='abi', metavar='abi', default=None, help=("Only use wheels compatible with Python " "abi <abi>, e.g. 'pypy_41'. If not specified, then the " "current interpreter abi tag is used. Generally " "you will need to specify --implementation, " "--platform, and --python-version when using " "this option."), ) # type: Callable[..., Option] def add_target_python_options(cmd_opts): # type: (OptionGroup) -> None cmd_opts.add_option(platform()) cmd_opts.add_option(python_version()) cmd_opts.add_option(implementation()) cmd_opts.add_option(abi()) def make_target_python(options): # type: (Values) -> TargetPython target_python = TargetPython( platform=options.platform, py_version_info=options.python_version, abi=options.abi, implementation=options.implementation, ) return target_python def prefer_binary(): # type: () -> Option return Option( "--prefer-binary", dest="prefer_binary", action="store_true", default=False, help="Prefer older binary packages over newer source packages." ) cache_dir = partial( PipOption, "--cache-dir", dest="cache_dir", default=USER_CACHE_DIR, metavar="dir", type='path', help="Store the cache data in <dir>." ) # type: Callable[..., Option] def _handle_no_cache_dir(option, opt, value, parser): # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-cache-dir option. This is an optparse.Option callback for the --no-cache-dir option. """ # The value argument will be None if --no-cache-dir is passed via the # command-line, since the option doesn't accept arguments. However, # the value can be non-None if the option is triggered e.g. by an # environment variable, like PIP_NO_CACHE_DIR=true. if value is not None: # Then parse the string value to get argument error-checking. 
try: strtobool(value) except ValueError as exc: raise_option_error(parser, option=option, msg=str(exc)) # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() # converted to 0 (like "false" or "no") caused cache_dir to be disabled # rather than enabled (logic would say the latter). Thus, we disable # the cache directory not just on values that parse to True, but (for # backwards compatibility reasons) also on values that parse to False. # In other words, always set it to False if the option is provided in # some (valid) form. parser.values.cache_dir = False no_cache = partial( Option, "--no-cache-dir", dest="cache_dir", action="callback", callback=_handle_no_cache_dir, help="Disable the cache.", ) # type: Callable[..., Option] no_deps = partial( Option, '--no-deps', '--no-dependencies', dest='ignore_dependencies', action='store_true', default=False, help="Don't install package dependencies.", ) # type: Callable[..., Option] def _handle_build_dir(option, opt, value, parser): # type: (Option, str, str, OptionParser) -> None if value: value = os.path.abspath(value) setattr(parser.values, option.dest, value) build_dir = partial( PipOption, '-b', '--build', '--build-dir', '--build-directory', dest='build_dir', type='path', metavar='dir', action='callback', callback=_handle_build_dir, help='(DEPRECATED) ' 'Directory to unpack packages into and build in. Note that ' 'an initial build still takes place in a temporary directory. ' 'The location of temporary directories can be controlled by setting ' 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' 'When passed, build directories are not cleaned in case of failures.' ) # type: Callable[..., Option] ignore_requires_python = partial( Option, '--ignore-requires-python', dest='ignore_requires_python', action='store_true', help='Ignore the Requires-Python information.' ) # type: Callable[..., Option] no_build_isolation = partial( Option, '--no-build-isolation', dest='build_isolation', action='store_false', default=True, help='Disable isolation when building a modern source distribution. ' 'Build dependencies specified by PEP 518 must be already installed ' 'if this option is used.' ) # type: Callable[..., Option] def _handle_no_use_pep517(option, opt, value, parser): # type: (Option, str, str, OptionParser) -> None """ Process a value provided for the --no-use-pep517 option. This is an optparse.Option callback for the no_use_pep517 option. """ # Since --no-use-pep517 doesn't accept arguments, the value argument # will be None if --no-use-pep517 is passed via the command-line. # However, the value can be non-None if the option is triggered e.g. # by an environment variable, for example "PIP_NO_USE_PEP517=true". if value is not None: msg = """A value was passed for --no-use-pep517, probably using either the PIP_NO_USE_PEP517 environment variable or the "no-use-pep517" config file option. Use an appropriate value of the PIP_USE_PEP517 environment variable or the "use-pep517" config file option instead. """ raise_option_error(parser, option=option, msg=msg) # Otherwise, --no-use-pep517 was passed via the command-line. parser.values.use_pep517 = False use_pep517 = partial( Option, '--use-pep517', dest='use_pep517', action='store_true', default=None, help='Use PEP 517 for building source distributions ' '(use --no-use-pep517 to force legacy behaviour).' 
) # type: Any no_use_pep517 = partial( Option, '--no-use-pep517', dest='use_pep517', action='callback', callback=_handle_no_use_pep517, default=None, help=SUPPRESS_HELP ) # type: Any install_options = partial( Option, '--install-option', dest='install_options', action='append', metavar='options', help="Extra arguments to be supplied to the setup.py install " "command (use like --install-option=\"--install-scripts=/usr/local/" "bin\"). Use multiple --install-option options to pass multiple " "options to setup.py install. If you are using an option with a " "directory path, be sure to use absolute path.", ) # type: Callable[..., Option] global_options = partial( Option, '--global-option', dest='global_options', action='append', metavar='options', help="Extra global options to be supplied to the setup.py " "call before the install command.", ) # type: Callable[..., Option] no_clean = partial( Option, '--no-clean', action='store_true', default=False, help="Don't clean up build directories." ) # type: Callable[..., Option] pre = partial( Option, '--pre', action='store_true', default=False, help="Include pre-release and development versions. By default, " "pip only finds stable versions.", ) # type: Callable[..., Option] disable_pip_version_check = partial( Option, "--disable-pip-version-check", dest="disable_pip_version_check", action="store_true", default=False, help="Don't periodically check PyPI to determine whether a new version " "of pip is available for download. Implied with --no-index.", ) # type: Callable[..., Option] def _handle_merge_hash(option, opt_str, value, parser): # type: (Option, str, str, OptionParser) -> None """Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.""" if not parser.values.hashes: parser.values.hashes = {} try: algo, digest = value.split(':', 1) except ValueError: parser.error('Arguments to {} must be a hash name ' # noqa 'followed by a value, like --hash=sha256:' 'abcde...'.format(opt_str)) if algo not in STRONG_HASHES: parser.error('Allowed hash algorithms for {} are {}.'.format( # noqa opt_str, ', '.join(STRONG_HASHES))) parser.values.hashes.setdefault(algo, []).append(digest) hash = partial( Option, '--hash', # Hash values eventually end up in InstallRequirement.hashes due to # __dict__ copying in process_line(). dest='hashes', action='callback', callback=_handle_merge_hash, type='string', help="Verify that the package's archive matches this " 'hash before installing. Example: --hash=sha256:abcdef...', ) # type: Callable[..., Option] require_hashes = partial( Option, '--require-hashes', dest='require_hashes', action='store_true', default=False, help='Require a hash to check each requirement against, for ' 'repeatable installs. This option is implied when any package in a ' 'requirements file has a --hash option.', ) # type: Callable[..., Option] list_path = partial( PipOption, '--path', dest='path', type='path', action='append', help='Restrict to the specified installation path for listing ' 'packages (can be used multiple times).' 
) # type: Callable[..., Option] def check_list_path_option(options): # type: (Values) -> None if options.path and (options.user or options.local): raise CommandError( "Cannot combine '--path' with '--user' or '--local'" ) no_python_version_warning = partial( Option, '--no-python-version-warning', dest='no_python_version_warning', action='store_true', default=False, help='Silence deprecation warnings for upcoming unsupported Pythons.', ) # type: Callable[..., Option] unstable_feature = partial( Option, '--unstable-feature', dest='unstable_features', metavar='feature', action='append', default=[], choices=['resolver'], help=SUPPRESS_HELP, # TODO: drop this in pip 20.3 ) # type: Callable[..., Option] use_new_feature = partial( Option, '--use-feature', dest='features_enabled', metavar='feature', action='append', default=[], choices=['2020-resolver', 'fast-deps'], help='Enable new functionality, that may be backward incompatible.', ) # type: Callable[..., Option] use_deprecated_feature = partial( Option, '--use-deprecated', dest='deprecated_features_enabled', metavar='feature', action='append', default=[], choices=[], help=( 'Enable deprecated functionality, that will be removed in the future.' ), ) # type: Callable[..., Option] ########## # groups # ########## general_group = { 'name': 'General Options', 'options': [ help_, isolated_mode, require_virtualenv, verbose, version, quiet, log, no_input, proxy, retries, timeout, exists_action, trusted_host, cert, client_cert, cache_dir, no_cache, disable_pip_version_check, no_color, no_python_version_warning, unstable_feature, use_new_feature, use_deprecated_feature, ] } # type: Dict[str, Any] index_group = { 'name': 'Package Index Options', 'options': [ index_url, extra_index_url, no_index, find_links, ] } # type: Dict[str, Any]
Django-locallibrary/env/Lib/site-packages/pip/_internal/cli/cmdoptions.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/cli/cmdoptions.py", "repo_id": "Django-locallibrary", "token_count": 11128 }
9
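Note on the cmdoptions module above: each option is defined as a functools.partial over optparse's Option (or pip's PipOption subclass) so that every command can build a fresh Option instance on demand. The stand-alone sketch below mirrors that pattern and the merge-style callback used for --hash; it relies only on the standard library and is an illustration, not pip's actual wiring.

from functools import partial
from optparse import Option, OptionParser


def _merge_hash(option, opt_str, value, parser):
    # Accumulate repeated "algo:digest" values into a dict, in the same spirit
    # as _handle_merge_hash above (error handling omitted for brevity).
    if not parser.values.hashes:
        parser.values.hashes = {}
    algo, digest = value.split(':', 1)
    parser.values.hashes.setdefault(algo, []).append(digest)


# An option "factory": calling hash_opt() builds a fresh Option instance.
hash_opt = partial(
    Option, '--hash', dest='hashes', action='callback',
    callback=_merge_hash, type='string',
)

parser = OptionParser()
parser.add_option(hash_opt())
opts, _ = parser.parse_args(['--hash', 'sha256:aaaa', '--hash', 'sha256:bbbb'])
print(opts.hashes)  # {'sha256': ['aaaa', 'bbbb']}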
from __future__ import absolute_import import sys from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import SUCCESS from pip._internal.models.format_control import FormatControl from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs from pip._internal.utils.typing import MYPY_CHECK_RUNNING DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} if MYPY_CHECK_RUNNING: from optparse import Values from typing import List class FreezeCommand(Command): """ Output installed packages in requirements format. packages are listed in a case-insensitive sorted order. """ usage = """ %prog [options]""" log_streams = ("ext://sys.stderr", "ext://sys.stderr") def add_options(self): # type: () -> None self.cmd_opts.add_option( '-r', '--requirement', dest='requirements', action='append', default=[], metavar='file', help="Use the order in the given requirements file and its " "comments when generating output. This option can be " "used multiple times.") self.cmd_opts.add_option( '-f', '--find-links', dest='find_links', action='append', default=[], metavar='URL', help='URL for finding packages, which will be added to the ' 'output.') self.cmd_opts.add_option( '-l', '--local', dest='local', action='store_true', default=False, help='If in a virtualenv that has global access, do not output ' 'globally-installed packages.') self.cmd_opts.add_option( '--user', dest='user', action='store_true', default=False, help='Only output packages installed in user-site.') self.cmd_opts.add_option(cmdoptions.list_path()) self.cmd_opts.add_option( '--all', dest='freeze_all', action='store_true', help='Do not skip these packages in the output:' ' {}'.format(', '.join(DEV_PKGS))) self.cmd_opts.add_option( '--exclude-editable', dest='exclude_editable', action='store_true', help='Exclude editable package from output.') self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): # type: (Values, List[str]) -> int format_control = FormatControl(set(), set()) wheel_cache = WheelCache(options.cache_dir, format_control) skip = set(stdlib_pkgs) if not options.freeze_all: skip.update(DEV_PKGS) cmdoptions.check_list_path_option(options) freeze_kwargs = dict( requirement=options.requirements, find_links=options.find_links, local_only=options.local, user_only=options.user, paths=options.path, isolated=options.isolated_mode, wheel_cache=wheel_cache, skip=skip, exclude_editable=options.exclude_editable, ) for line in freeze(**freeze_kwargs): sys.stdout.write(line + '\n') return SUCCESS
Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/freeze.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/freeze.py", "repo_id": "Django-locallibrary", "token_count": 1580 }
10
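Note on the freeze command above: run() forwards the parsed options to the freeze() generator and streams its lines to stdout. The snippet below is a rough stand-alone approximation of that output using pkg_resources directly; it is not the internal freeze() implementation, and it only reproduces the simplest behaviour of skipping the packages listed in DEV_PKGS.

import sys

import pkg_resources

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}

for dist in sorted(pkg_resources.working_set,
                   key=lambda d: d.project_name.lower()):
    if dist.project_name.lower() in DEV_PKGS:
        continue
    sys.stdout.write('{}=={}\n'.format(dist.project_name, dist.version))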
import abc from pip._vendor.six import add_metaclass from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import Optional from pip._vendor.pkg_resources import Distribution from pip._internal.req import InstallRequirement from pip._internal.index.package_finder import PackageFinder @add_metaclass(abc.ABCMeta) class AbstractDistribution(object): """A base class for handling installable artifacts. The requirements for anything installable are as follows: - we must be able to determine the requirement name (or we can't correctly handle the non-upgrade case). - for packages with setup requirements, we must also be able to determine their requirements without installing additional packages (for the same reason as run-time dependencies) - we must be able to create a Distribution object exposing the above metadata. """ def __init__(self, req): # type: (InstallRequirement) -> None super(AbstractDistribution, self).__init__() self.req = req @abc.abstractmethod def get_pkg_resources_distribution(self): # type: () -> Optional[Distribution] raise NotImplementedError() @abc.abstractmethod def prepare_distribution_metadata(self, finder, build_isolation): # type: (PackageFinder, bool) -> None raise NotImplementedError()
Django-locallibrary/env/Lib/site-packages/pip/_internal/distributions/base.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/distributions/base.py", "repo_id": "Django-locallibrary", "token_count": 466 }
11
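Note on the abstract base class above: concrete subclasses elsewhere in this package implement the two abstract methods for sdists, wheels and already-installed requirements. The class below is a hypothetical minimal subclass written only to illustrate the contract; the attribute req.satisfied_by is an assumption about how the wrapped requirement was populated.

class AlreadyInstalledDistribution(AbstractDistribution):
    """Hypothetical: wrap a requirement already satisfied by an installed dist."""

    def get_pkg_resources_distribution(self):
        # type: () -> Optional[Distribution]
        # Assumption: the resolver filled in req.satisfied_by beforehand.
        return self.req.satisfied_by

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        # Nothing to build or download for an already-installed distribution.
        pass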
from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import Optional from pip._internal.models.format_control import FormatControl class SelectionPreferences(object): """ Encapsulates the candidate selection preferences for downloading and installing files. """ __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control', 'prefer_binary', 'ignore_requires_python'] # Don't include an allow_yanked default value to make sure each call # site considers whether yanked releases are allowed. This also causes # that decision to be made explicit in the calling code, which helps # people when reading the code. def __init__( self, allow_yanked, # type: bool allow_all_prereleases=False, # type: bool format_control=None, # type: Optional[FormatControl] prefer_binary=False, # type: bool ignore_requires_python=None, # type: Optional[bool] ): # type: (...) -> None """Create a SelectionPreferences object. :param allow_yanked: Whether files marked as yanked (in the sense of PEP 592) are permitted to be candidates for install. :param format_control: A FormatControl object or None. Used to control the selection of source packages / binary packages when consulting the index and links. :param prefer_binary: Whether to prefer an old, but valid, binary dist over a new source dist. :param ignore_requires_python: Whether to ignore incompatible "Requires-Python" values in links. Defaults to False. """ if ignore_requires_python is None: ignore_requires_python = False self.allow_yanked = allow_yanked self.allow_all_prereleases = allow_all_prereleases self.format_control = format_control self.prefer_binary = prefer_binary self.ignore_requires_python = ignore_requires_python
Django-locallibrary/env/Lib/site-packages/pip/_internal/models/selection_prefs.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/models/selection_prefs.py", "repo_id": "Django-locallibrary", "token_count": 749 }
12
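Usage note for SelectionPreferences above: in pip the values are derived from command-line options, but constructing one by hand looks like the sketch below. The FormatControl value shown (restricting selection to wheels) is only an example.

from pip._internal.models.format_control import FormatControl
from pip._internal.models.selection_prefs import SelectionPreferences

prefs = SelectionPreferences(
    allow_yanked=False,             # must always be passed explicitly
    allow_all_prereleases=False,
    format_control=FormatControl(set(), {':all:'}),  # only consider wheels
    prefer_binary=True,
    ignore_requires_python=None,    # normalised to False in __init__
)
print(prefs.prefer_binary)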
"""PipSession and supporting code, containing all pip-specific network request configuration and behavior. """ # The following comment should be removed at some point in the future. # mypy: disallow-untyped-defs=False import email.utils import json import logging import mimetypes import os import platform import sys import warnings from pip._vendor import requests, six, urllib3 from pip._vendor.cachecontrol import CacheControlAdapter from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.models import Response from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.urllib3.exceptions import InsecureRequestWarning from pip import __version__ from pip._internal.network.auth import MultiDomainBasicAuth from pip._internal.network.cache import SafeFileCache # Import ssl from compat so the initial import occurs in only one place. from pip._internal.utils.compat import has_tls, ipaddress from pip._internal.utils.glibc import libc_ver from pip._internal.utils.misc import ( build_url_from_netloc, get_installed_version, parse_netloc, ) from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.urls import url_to_path if MYPY_CHECK_RUNNING: from typing import ( Iterator, List, Optional, Tuple, Union, ) from pip._internal.models.link import Link SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] logger = logging.getLogger(__name__) # Ignore warning raised when using --trusted-host. warnings.filterwarnings("ignore", category=InsecureRequestWarning) SECURE_ORIGINS = [ # protocol, hostname, port # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) ("https", "*", "*"), ("*", "localhost", "*"), ("*", "127.0.0.0/8", "*"), ("*", "::1/128", "*"), ("file", "*", None), # ssh is always secure. ("ssh", "*", "*"), ] # type: List[SecureOrigin] # These are environment variables present when running under various # CI systems. For each variable, some CI systems that use the variable # are indicated. The collection was chosen so that for each of a number # of popular systems, at least one of the environment variables is used. # This list is used to provide some indication of and lower bound for # CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. # For more background, see: https://github.com/pypa/pip/issues/5499 CI_ENVIRONMENT_VARIABLES = ( # Azure Pipelines 'BUILD_BUILDID', # Jenkins 'BUILD_ID', # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI 'CI', # Explicit environment variable. 'PIP_IS_CI', ) def looks_like_ci(): # type: () -> bool """ Return whether it looks like pip is running under CI. """ # We don't use the method of checking for a tty (e.g. using isatty()) # because some CI systems mimic a tty (e.g. Travis CI). Thus that # method doesn't provide definitive information in either direction. return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) def user_agent(): """ Return a string representing the user agent. 
""" data = { "installer": {"name": "pip", "version": __version__}, "python": platform.python_version(), "implementation": { "name": platform.python_implementation(), }, } if data["implementation"]["name"] == 'CPython': data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'PyPy': if sys.pypy_version_info.releaselevel == 'final': pypy_version_info = sys.pypy_version_info[:3] else: pypy_version_info = sys.pypy_version_info data["implementation"]["version"] = ".".join( [str(x) for x in pypy_version_info] ) elif data["implementation"]["name"] == 'Jython': # Complete Guess data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'IronPython': # Complete Guess data["implementation"]["version"] = platform.python_version() if sys.platform.startswith("linux"): from pip._vendor import distro distro_infos = dict(filter( lambda x: x[1], zip(["name", "version", "id"], distro.linux_distribution()), )) libc = dict(filter( lambda x: x[1], zip(["lib", "version"], libc_ver()), )) if libc: distro_infos["libc"] = libc if distro_infos: data["distro"] = distro_infos if sys.platform.startswith("darwin") and platform.mac_ver()[0]: data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} if platform.system(): data.setdefault("system", {})["name"] = platform.system() if platform.release(): data.setdefault("system", {})["release"] = platform.release() if platform.machine(): data["cpu"] = platform.machine() if has_tls(): import _ssl as ssl data["openssl_version"] = ssl.OPENSSL_VERSION setuptools_version = get_installed_version("setuptools") if setuptools_version is not None: data["setuptools_version"] = setuptools_version # Use None rather than False so as not to give the impression that # pip knows it is not being run under CI. Rather, it is a null or # inconclusive result. Also, we include some value rather than no # value to make it easier to know that the check has been run. data["ci"] = True if looks_like_ci() else None user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") if user_data is not None: data["user_data"] = user_data return "{data[installer][name]}/{data[installer][version]} {json}".format( data=data, json=json.dumps(data, separators=(",", ":"), sort_keys=True), ) class LocalFSAdapter(BaseAdapter): def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): pathname = url_to_path(request.url) resp = Response() resp.status_code = 200 resp.url = request.url try: stats = os.stat(pathname) except OSError as exc: resp.status_code = 404 resp.raw = exc else: modified = email.utils.formatdate(stats.st_mtime, usegmt=True) content_type = mimetypes.guess_type(pathname)[0] or "text/plain" resp.headers = CaseInsensitiveDict({ "Content-Type": content_type, "Content-Length": stats.st_size, "Last-Modified": modified, }) resp.raw = open(pathname, "rb") resp.close = resp.raw.close return resp def close(self): pass class InsecureHTTPAdapter(HTTPAdapter): def cert_verify(self, conn, url, verify, cert): super(InsecureHTTPAdapter, self).cert_verify( conn=conn, url=url, verify=False, cert=cert ) class InsecureCacheControlAdapter(CacheControlAdapter): def cert_verify(self, conn, url, verify, cert): super(InsecureCacheControlAdapter, self).cert_verify( conn=conn, url=url, verify=False, cert=cert ) class PipSession(requests.Session): timeout = None # type: Optional[int] def __init__(self, *args, **kwargs): """ :param trusted_hosts: Domains not to emit warnings for when not using HTTPS. 
""" retries = kwargs.pop("retries", 0) cache = kwargs.pop("cache", None) trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str] index_urls = kwargs.pop("index_urls", None) super(PipSession, self).__init__(*args, **kwargs) # Namespace the attribute with "pip_" just in case to prevent # possible conflicts with the base class. self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]] # Attach our User Agent to the request self.headers["User-Agent"] = user_agent() # Attach our Authentication handler to the session self.auth = MultiDomainBasicAuth(index_urls=index_urls) # Create our urllib3.Retry instance which will allow us to customize # how we handle retries. retries = urllib3.Retry( # Set the total number of retries that a particular request can # have. total=retries, # A 503 error from PyPI typically means that the Fastly -> Origin # connection got interrupted in some way. A 503 error in general # is typically considered a transient error so we'll go ahead and # retry it. # A 500 may indicate transient error in Amazon S3 # A 520 or 527 - may indicate transient error in CloudFlare status_forcelist=[500, 503, 520, 527], # Add a small amount of back off between failed requests in # order to prevent hammering the service. backoff_factor=0.25, ) # Our Insecure HTTPAdapter disables HTTPS validation. It does not # support caching so we'll use it for all http:// URLs. # If caching is disabled, we will also use it for # https:// hosts that we've marked as ignoring # TLS errors for (trusted-hosts). insecure_adapter = InsecureHTTPAdapter(max_retries=retries) # We want to _only_ cache responses on securely fetched origins or when # the host is specified as trusted. We do this because # we can't validate the response of an insecurely/untrusted fetched # origin, and we don't want someone to be able to poison the cache and # require manual eviction from the cache to fix it. if cache: secure_adapter = CacheControlAdapter( cache=SafeFileCache(cache), max_retries=retries, ) self._trusted_host_adapter = InsecureCacheControlAdapter( cache=SafeFileCache(cache), max_retries=retries, ) else: secure_adapter = HTTPAdapter(max_retries=retries) self._trusted_host_adapter = insecure_adapter self.mount("https://", secure_adapter) self.mount("http://", insecure_adapter) # Enable file:// urls self.mount("file://", LocalFSAdapter()) for host in trusted_hosts: self.add_trusted_host(host, suppress_logging=True) def add_trusted_host(self, host, source=None, suppress_logging=False): # type: (str, Optional[str], bool) -> None """ :param host: It is okay to provide a host that has previously been added. :param source: An optional source string, for logging where the host string came from. """ if not suppress_logging: msg = 'adding trusted host: {!r}'.format(host) if source is not None: msg += ' (from {})'.format(source) logger.info(msg) host_port = parse_netloc(host) if host_port not in self.pip_trusted_origins: self.pip_trusted_origins.append(host_port) self.mount( build_url_from_netloc(host) + '/', self._trusted_host_adapter ) if not host_port[1]: # Mount wildcard ports for the same host. 
self.mount( build_url_from_netloc(host) + ':', self._trusted_host_adapter ) def iter_secure_origins(self): # type: () -> Iterator[SecureOrigin] for secure_origin in SECURE_ORIGINS: yield secure_origin for host, port in self.pip_trusted_origins: yield ('*', host, '*' if port is None else port) def is_secure_origin(self, location): # type: (Link) -> bool # Determine if this url used a secure transport mechanism parsed = urllib_parse.urlparse(str(location)) origin_protocol, origin_host, origin_port = ( parsed.scheme, parsed.hostname, parsed.port, ) # The protocol to use to see if the protocol matches. # Don't count the repository type as part of the protocol: in # cases such as "git+ssh", only use "ssh". (I.e., Only verify against # the last scheme.) origin_protocol = origin_protocol.rsplit('+', 1)[-1] # Determine if our origin is a secure origin by looking through our # hardcoded list of secure origins, as well as any additional ones # configured on this PackageFinder instance. for secure_origin in self.iter_secure_origins(): secure_protocol, secure_host, secure_port = secure_origin if origin_protocol != secure_protocol and secure_protocol != "*": continue try: addr = ipaddress.ip_address( None if origin_host is None else six.ensure_text(origin_host) ) network = ipaddress.ip_network( six.ensure_text(secure_host) ) except ValueError: # We don't have both a valid address or a valid network, so # we'll check this origin against hostnames. if ( origin_host and origin_host.lower() != secure_host.lower() and secure_host != "*" ): continue else: # We have a valid address and network, so see if the address # is contained within the network. if addr not in network: continue # Check to see if the port matches. if ( origin_port != secure_port and secure_port != "*" and secure_port is not None ): continue # If we've gotten here, then this origin matches the current # secure origin and we should return True return True # If we've gotten to this point, then the origin isn't secure and we # will not accept it as a valid location to search. We will however # log a warning that we are ignoring it. logger.warning( "The repository located at %s is not a trusted or secure host and " "is being ignored. If this repository is available via HTTPS we " "recommend you use HTTPS instead, otherwise you may silence " "this warning and allow it anyway with '--trusted-host %s'.", origin_host, origin_host, ) return False def request(self, method, url, *args, **kwargs): # Allow setting a default timeout on a session kwargs.setdefault("timeout", self.timeout) # Dispatch the actual request return super(PipSession, self).request(method, url, *args, **kwargs)
Django-locallibrary/env/Lib/site-packages/pip/_internal/network/session.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/network/session.py", "repo_id": "Django-locallibrary", "token_count": 6329 }
13
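Usage note for PipSession above: pip's command machinery normally builds the session from parsed options, but a hand-rolled construction looks roughly like this. The cache path and trusted host below are hypothetical.

from pip._internal.network.session import PipSession

session = PipSession(
    cache='/tmp/pip-http-cache',               # hypothetical cache directory
    retries=3,
    trusted_hosts=['pypi.internal.example'],   # hypothetical internal index host
    index_urls=['https://pypi.org/simple'],
)
session.timeout = 15                           # default timeout applied by request()
resp = session.get('https://pypi.org/simple/')
print(resp.status_code)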
import logging import os from pip._internal.utils.subprocess import runner_with_spinner_message from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import List, Optional from pip._vendor.pep517.wrappers import Pep517HookCaller logger = logging.getLogger(__name__) def build_wheel_pep517( name, # type: str backend, # type: Pep517HookCaller metadata_directory, # type: str build_options, # type: List[str] tempd, # type: str ): # type: (...) -> Optional[str] """Build one InstallRequirement using the PEP 517 build process. Returns path to wheel if successfully built. Otherwise, returns None. """ assert metadata_directory is not None if build_options: # PEP 517 does not support --build-options logger.error('Cannot build wheel for %s using PEP 517 when ' '--build-option is present', name) return None try: logger.debug('Destination directory: %s', tempd) runner = runner_with_spinner_message( 'Building wheel for {} (PEP 517)'.format(name) ) with backend.subprocess_runner(runner): wheel_name = backend.build_wheel( tempd, metadata_directory=metadata_directory, ) except Exception: logger.error('Failed building wheel for %s', name) return None return os.path.join(tempd, wheel_name)
Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/build/wheel.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/build/wheel.py", "repo_id": "Django-locallibrary", "token_count": 587 }
14
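Usage note for build_wheel_pep517 above: real callers go through pip's wheel builder, which first prepares the metadata directory via the PEP 517 prepare_metadata_for_build_wheel hook. The sketch below only shows the shape of a direct call; the project path, backend name and metadata directory are assumptions.

import tempfile

from pip._vendor.pep517.wrappers import Pep517HookCaller

backend = Pep517HookCaller('/path/to/project', 'setuptools.build_meta')
metadata_dir = '/path/to/prepared/metadata'   # produced by an earlier hook call

with tempfile.TemporaryDirectory() as tempd:
    wheel_path = build_wheel_pep517(
        name='example-project',
        backend=backend,
        metadata_directory=metadata_dir,
        build_options=[],   # any entries here make the helper return None
        tempd=tempd,
    )
    print(wheel_path)       # full path to the built wheel, or None on failure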
from pip._vendor.packaging.utils import canonicalize_name from pip._internal.utils.typing import MYPY_CHECK_RUNNING from .base import Requirement, format_name if MYPY_CHECK_RUNNING: from pip._vendor.packaging.specifiers import SpecifierSet from pip._internal.req.req_install import InstallRequirement from .base import Candidate, CandidateLookup class ExplicitRequirement(Requirement): def __init__(self, candidate): # type: (Candidate) -> None self.candidate = candidate def __repr__(self): # type: () -> str return "{class_name}({candidate!r})".format( class_name=self.__class__.__name__, candidate=self.candidate, ) @property def name(self): # type: () -> str # No need to canonicalise - the candidate did this return self.candidate.name def format_for_error(self): # type: () -> str return self.candidate.format_for_error() def get_candidate_lookup(self): # type: () -> CandidateLookup return self.candidate, None def is_satisfied_by(self, candidate): # type: (Candidate) -> bool return candidate == self.candidate class SpecifierRequirement(Requirement): def __init__(self, ireq): # type: (InstallRequirement) -> None assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq self._extras = frozenset(ireq.extras) def __str__(self): # type: () -> str return str(self._ireq.req) def __repr__(self): # type: () -> str return "{class_name}({requirement!r})".format( class_name=self.__class__.__name__, requirement=str(self._ireq.req), ) @property def name(self): # type: () -> str canonical_name = canonicalize_name(self._ireq.req.name) return format_name(canonical_name, self._extras) def format_for_error(self): # type: () -> str # Convert comma-separated specifiers into "A, B, ..., F and G" # This makes the specifier a bit more "human readable", without # risking a change in meaning. (Hopefully! Not all edge cases have # been checked) parts = [s.strip() for s in str(self).split(",")] if len(parts) == 0: return "" elif len(parts) == 1: return parts[0] return ", ".join(parts[:-1]) + " and " + parts[-1] def get_candidate_lookup(self): # type: () -> CandidateLookup return None, self._ireq def is_satisfied_by(self, candidate): # type: (Candidate) -> bool assert candidate.name == self.name, \ "Internal issue: Candidate is not for this requirement " \ " {} vs {}".format(candidate.name, self.name) # We can safely always allow prereleases here since PackageFinder # already implements the prerelease logic, and would have filtered out # prerelease candidates if the user does not expect them. spec = self._ireq.req.specifier return spec.contains(candidate.version, prereleases=True) class RequiresPythonRequirement(Requirement): """A requirement representing Requires-Python metadata. 
""" def __init__(self, specifier, match): # type: (SpecifierSet, Candidate) -> None self.specifier = specifier self._candidate = match def __repr__(self): # type: () -> str return "{class_name}({specifier!r})".format( class_name=self.__class__.__name__, specifier=str(self.specifier), ) @property def name(self): # type: () -> str return self._candidate.name def format_for_error(self): # type: () -> str return "Python " + str(self.specifier) def get_candidate_lookup(self): # type: () -> CandidateLookup if self.specifier.contains(self._candidate.version, prereleases=True): return self._candidate, None return None, None def is_satisfied_by(self, candidate): # type: (Candidate) -> bool assert candidate.name == self._candidate.name, "Not Python candidate" # We can safely always allow prereleases here since PackageFinder # already implements the prerelease logic, and would have filtered out # prerelease candidates if the user does not expect them. return self.specifier.contains(candidate.version, prereleases=True)
Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/requirements.py", "repo_id": "Django-locallibrary", "token_count": 1830 }
15
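Note on SpecifierRequirement.is_satisfied_by above: the actual version check is delegated to packaging's SpecifierSet with prereleases always allowed, since PackageFinder has already applied the pre-release policy. A small illustration:

from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.version import Version

spec = SpecifierSet('>=2.0,<3.0')
print(spec.contains(Version('2.5.1'), prereleases=True))  # True
print(spec.contains(Version('3.1'), prereleases=True))    # False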
# The following comment should be removed at some point in the future. # mypy: strict-optional=False # mypy: disallow-untyped-defs=False from __future__ import absolute_import import contextlib import errno import getpass import hashlib import io import logging import os import posixpath import shutil import stat import sys from collections import deque from itertools import tee from pip._vendor import pkg_resources from pip._vendor.packaging.utils import canonicalize_name # NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is # why we ignore the type on this import. from pip._vendor.retrying import retry # type: ignore from pip._vendor.six import PY2, text_type from pip._vendor.six.moves import filter, filterfalse, input, map, zip_longest from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote from pip import __version__ from pip._internal.exceptions import CommandError from pip._internal.locations import ( get_major_minor_version, site_packages, user_site, ) from pip._internal.utils.compat import ( WINDOWS, expanduser, stdlib_pkgs, str_to_display, ) from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast from pip._internal.utils.virtualenv import ( running_under_virtualenv, virtualenv_no_global, ) if PY2: from io import BytesIO as StringIO else: from io import StringIO if MYPY_CHECK_RUNNING: from typing import ( Any, AnyStr, Callable, Container, Iterable, Iterator, List, Optional, Text, Tuple, TypeVar, Union, ) from pip._vendor.pkg_resources import Distribution VersionInfo = Tuple[int, int, int] T = TypeVar("T") __all__ = ['rmtree', 'display_path', 'backup_dir', 'ask', 'splitext', 'format_size', 'is_installable_dir', 'normalize_path', 'renames', 'get_prog', 'captured_stdout', 'ensure_dir', 'get_installed_version', 'remove_auth_from_url'] logger = logging.getLogger(__name__) def get_pip_version(): # type: () -> str pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") pip_pkg_dir = os.path.abspath(pip_pkg_dir) return ( 'pip {} from {} (python {})'.format( __version__, pip_pkg_dir, get_major_minor_version(), ) ) def normalize_version_info(py_version_info): # type: (Tuple[int, ...]) -> Tuple[int, int, int] """ Convert a tuple of ints representing a Python version to one of length three. :param py_version_info: a tuple of ints representing a Python version, or None to specify no version. The tuple can have any length. :return: a tuple of length three if `py_version_info` is non-None. Otherwise, return `py_version_info` unchanged (i.e. None). """ if len(py_version_info) < 3: py_version_info += (3 - len(py_version_info)) * (0,) elif len(py_version_info) > 3: py_version_info = py_version_info[:3] return cast('VersionInfo', py_version_info) def ensure_dir(path): # type: (AnyStr) -> None """os.path.makedirs without EEXIST.""" try: os.makedirs(path) except OSError as e: # Windows can raise spurious ENOTEMPTY errors. See #6426. 
if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: raise def get_prog(): # type: () -> str try: prog = os.path.basename(sys.argv[0]) if prog in ('__main__.py', '-c'): return "{} -m pip".format(sys.executable) else: return prog except (AttributeError, TypeError, IndexError): pass return 'pip' # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): # type: (Text, bool) -> None shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) def rmtree_errorhandler(func, path, exc_info): """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. We catch that here, remove the read-only attribute, and hopefully continue without problems.""" try: has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) except (IOError, OSError): # it's equivalent to os.path.exists return if has_attr_readonly: # convert to read/write os.chmod(path, stat.S_IWRITE) # use the original function to repeat the operation func(path) return else: raise def path_to_display(path): # type: (Optional[Union[str, Text]]) -> Optional[Text] """ Convert a bytes (or text) path to text (unicode in Python 2) for display and logging purposes. This function should never error out. Also, this function is mainly needed for Python 2 since in Python 3 str paths are already text. """ if path is None: return None if isinstance(path, text_type): return path # Otherwise, path is a bytes object (str in Python 2). try: display_path = path.decode(sys.getfilesystemencoding(), 'strict') except UnicodeDecodeError: # Include the full bytes to make troubleshooting easier, even though # it may not be very human readable. if PY2: # Convert the bytes to a readable str representation using # repr(), and then convert the str to unicode. # Also, we add the prefix "b" to the repr() return value both # to make the Python 2 output look like the Python 3 output, and # to signal to the user that this is a bytes representation. display_path = str_to_display('b{!r}'.format(path)) else: # Silence the "F821 undefined name 'ascii'" flake8 error since # in Python 3 ascii() is a built-in. display_path = ascii(path) # noqa: F821 return display_path def display_path(path): # type: (Union[str, Text]) -> str """Gives the display value for a given path, making it relative to cwd if possible.""" path = os.path.normcase(os.path.abspath(path)) if sys.version_info[0] == 2: path = path.decode(sys.getfilesystemencoding(), 'replace') path = path.encode(sys.getdefaultencoding(), 'replace') if path.startswith(os.getcwd() + os.path.sep): path = '.' 
+ path[len(os.getcwd()):] return path def backup_dir(dir, ext='.bak'): # type: (str, str) -> str """Figure out the name of a directory to back up the given dir to (adding .bak, .bak2, etc)""" n = 1 extension = ext while os.path.exists(dir + extension): n += 1 extension = ext + str(n) return dir + extension def ask_path_exists(message, options): # type: (str, Iterable[str]) -> str for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): if action in options: return action return ask(message, options) def _check_no_input(message): # type: (str) -> None """Raise an error if no input is allowed.""" if os.environ.get('PIP_NO_INPUT'): raise Exception( 'No input was expected ($PIP_NO_INPUT set); question: {}'.format( message) ) def ask(message, options): # type: (str, Iterable[str]) -> str """Ask the message interactively, with the given possible responses""" while 1: _check_no_input(message) response = input(message) response = response.strip().lower() if response not in options: print( 'Your response ({!r}) was not one of the expected responses: ' '{}'.format(response, ', '.join(options)) ) else: return response def ask_input(message): # type: (str) -> str """Ask for input interactively.""" _check_no_input(message) return input(message) def ask_password(message): # type: (str) -> str """Ask for a password interactively.""" _check_no_input(message) return getpass.getpass(message) def format_size(bytes): # type: (float) -> str if bytes > 1000 * 1000: return '{:.1f} MB'.format(bytes / 1000.0 / 1000) elif bytes > 10 * 1000: return '{} kB'.format(int(bytes / 1000)) elif bytes > 1000: return '{:.1f} kB'.format(bytes / 1000.0) else: return '{} bytes'.format(int(bytes)) def tabulate(rows): # type: (Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]] """Return a list of formatted rows and a list of column sizes. For example:: >>> tabulate([['foobar', 2000], [0xdeadbeef]]) (['foobar 2000', '3735928559'], [10, 4]) """ rows = [tuple(map(str, row)) for row in rows] sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue='')] table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows] return table, sizes def is_installable_dir(path): # type: (str) -> bool """Is path is a directory containing setup.py or pyproject.toml? """ if not os.path.isdir(path): return False setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): return True pyproject_toml = os.path.join(path, 'pyproject.toml') if os.path.isfile(pyproject_toml): return True return False def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): """Yield pieces of data from a file-like object until EOF.""" while True: chunk = file.read(size) if not chunk: break yield chunk def normalize_path(path, resolve_symlinks=True): # type: (str, bool) -> str """ Convert a path to its canonical, case-normalized, absolute version. """ path = expanduser(path) if resolve_symlinks: path = os.path.realpath(path) else: path = os.path.abspath(path) return os.path.normcase(path) def splitext(path): # type: (str) -> Tuple[str, str] """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) if base.lower().endswith('.tar'): ext = base[-4:] + ext base = base[:-4] return base, ext def renames(old, new): # type: (str, str) -> None """Like os.renames(), but handles renaming across devices.""" # Implementation borrowed from os.renames(). 
head, tail = os.path.split(new) if head and tail and not os.path.exists(head): os.makedirs(head) shutil.move(old, new) head, tail = os.path.split(old) if head and tail: try: os.removedirs(head) except OSError: pass def is_local(path): # type: (str) -> bool """ Return True if path is within sys.prefix, if we're running in a virtualenv. If we're not in a virtualenv, all paths are considered "local." Caution: this function assumes the head of path has been normalized with normalize_path. """ if not running_under_virtualenv(): return True return path.startswith(normalize_path(sys.prefix)) def dist_is_local(dist): # type: (Distribution) -> bool """ Return True if given Distribution object is installed locally (i.e. within current virtualenv). Always True if we're not in a virtualenv. """ return is_local(dist_location(dist)) def dist_in_usersite(dist): # type: (Distribution) -> bool """ Return True if given Distribution is installed in user site. """ return dist_location(dist).startswith(normalize_path(user_site)) def dist_in_site_packages(dist): # type: (Distribution) -> bool """ Return True if given Distribution is installed in sysconfig.get_python_lib(). """ return dist_location(dist).startswith(normalize_path(site_packages)) def dist_is_editable(dist): # type: (Distribution) -> bool """ Return True if given Distribution is an editable install. """ for path_item in sys.path: egg_link = os.path.join(path_item, dist.project_name + '.egg-link') if os.path.isfile(egg_link): return True return False def get_installed_distributions( local_only=True, # type: bool skip=stdlib_pkgs, # type: Container[str] include_editables=True, # type: bool editables_only=False, # type: bool user_only=False, # type: bool paths=None # type: Optional[List[str]] ): # type: (...) -> List[Distribution] """ Return a list of installed Distribution objects. If ``local_only`` is True (default), only return installations local to the current virtualenv, if in a virtualenv. ``skip`` argument is an iterable of lower-case project names to ignore; defaults to stdlib_pkgs If ``include_editables`` is False, don't report editables. If ``editables_only`` is True , only report editables. If ``user_only`` is True , only report installations in the user site directory. If ``paths`` is set, only report the distributions present at the specified list of locations. """ if paths: working_set = pkg_resources.WorkingSet(paths) else: working_set = pkg_resources.working_set if local_only: local_test = dist_is_local else: def local_test(d): return True if include_editables: def editable_test(d): return True else: def editable_test(d): return not dist_is_editable(d) if editables_only: def editables_only_test(d): return dist_is_editable(d) else: def editables_only_test(d): return True if user_only: user_test = dist_in_usersite else: def user_test(d): return True return [d for d in working_set if local_test(d) and d.key not in skip and editable_test(d) and editables_only_test(d) and user_test(d) ] def _search_distribution(req_name): # type: (str) -> Optional[Distribution] """Find a distribution matching the ``req_name`` in the environment. This searches from *all* distributions available in the environment, to match the behavior of ``pkg_resources.get_distribution()``. 
""" # Canonicalize the name before searching in the list of # installed distributions and also while creating the package # dictionary to get the Distribution object req_name = canonicalize_name(req_name) packages = get_installed_distributions( local_only=False, skip=(), include_editables=True, editables_only=False, user_only=False, paths=None, ) pkg_dict = {canonicalize_name(p.key): p for p in packages} return pkg_dict.get(req_name) def get_distribution(req_name): # type: (str) -> Optional[Distribution] """Given a requirement name, return the installed Distribution object. This searches from *all* distributions available in the environment, to match the behavior of ``pkg_resources.get_distribution()``. """ # Search the distribution by looking through the working set dist = _search_distribution(req_name) # If distribution could not be found, call working_set.require # to update the working set, and try to find the distribution # again. # This might happen for e.g. when you install a package # twice, once using setup.py develop and again using setup.py install. # Now when run pip uninstall twice, the package gets removed # from the working set in the first uninstall, so we have to populate # the working set again so that pip knows about it and the packages # gets picked up and is successfully uninstalled the second time too. if not dist: try: pkg_resources.working_set.require(req_name) except pkg_resources.DistributionNotFound: return None return _search_distribution(req_name) def egg_link_path(dist): # type: (Distribution) -> Optional[str] """ Return the path for the .egg-link file if it exists, otherwise, None. There's 3 scenarios: 1) not in a virtualenv try to find in site.USER_SITE, then site_packages 2) in a no-global virtualenv try to find in site_packages 3) in a yes-global virtualenv try to find in site_packages, then site.USER_SITE (don't look in global location) For #1 and #3, there could be odd cases, where there's an egg-link in 2 locations. This method will just return the first one found. """ sites = [] if running_under_virtualenv(): sites.append(site_packages) if not virtualenv_no_global() and user_site: sites.append(user_site) else: if user_site: sites.append(user_site) sites.append(site_packages) for site in sites: egglink = os.path.join(site, dist.project_name) + '.egg-link' if os.path.isfile(egglink): return egglink return None def dist_location(dist): # type: (Distribution) -> str """ Get the site-packages location of this distribution. Generally this is dist.location, except in the case of develop-installed packages, where dist.location is the source code location, and we want to know where the egg-link file is. The returned location is normalized (in particular, with symlinks removed). 
""" egg_link = egg_link_path(dist) if egg_link: return normalize_path(egg_link) return normalize_path(dist.location) def write_output(msg, *args): # type: (Any, Any) -> None logger.info(msg, *args) class FakeFile(object): """Wrap a list of lines in an object with readline() to make ConfigParser happy.""" def __init__(self, lines): self._gen = iter(lines) def readline(self): try: return next(self._gen) except StopIteration: return '' def __iter__(self): return self._gen class StreamWrapper(StringIO): @classmethod def from_stream(cls, orig_stream): cls.orig_stream = orig_stream return cls() # compileall.compile_dir() needs stdout.encoding to print to stdout @property def encoding(self): return self.orig_stream.encoding @contextlib.contextmanager def captured_output(stream_name): """Return a context manager used by captured_stdout/stdin/stderr that temporarily replaces the sys stream *stream_name* with a StringIO. Taken from Lib/support/__init__.py in the CPython repo. """ orig_stdout = getattr(sys, stream_name) setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) try: yield getattr(sys, stream_name) finally: setattr(sys, stream_name, orig_stdout) def captured_stdout(): """Capture the output of sys.stdout: with captured_stdout() as stdout: print('hello') self.assertEqual(stdout.getvalue(), 'hello\n') Taken from Lib/support/__init__.py in the CPython repo. """ return captured_output('stdout') def captured_stderr(): """ See captured_stdout(). """ return captured_output('stderr') def get_installed_version(dist_name, working_set=None): """Get the installed version of dist_name avoiding pkg_resources cache""" # Create a requirement that we'll look for inside of setuptools. req = pkg_resources.Requirement.parse(dist_name) if working_set is None: # We want to avoid having this cached, so we need to construct a new # working set each time. working_set = pkg_resources.WorkingSet() # Get the installed distribution from our working set dist = working_set.find(req) # Check to see if we got an installed distribution or not, if we did # we want to return it's version. return dist.version if dist else None def consume(iterator): """Consume an iterable at C speed.""" deque(iterator, maxlen=0) # Simulates an enum def enum(*sequential, **named): enums = dict(zip(sequential, range(len(sequential))), **named) reverse = {value: key for key, value in enums.items()} enums['reverse_mapping'] = reverse return type('Enum', (), enums) def build_netloc(host, port): # type: (str, Optional[int]) -> str """ Build a netloc from a host-port pair """ if port is None: return host if ':' in host: # Only wrap host with square brackets when it is IPv6 host = '[{}]'.format(host) return '{}:{}'.format(host, port) def build_url_from_netloc(netloc, scheme='https'): # type: (str, str) -> str """ Build a full URL from a netloc. """ if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: # It must be a bare IPv6 address, so wrap it with brackets. netloc = '[{}]'.format(netloc) return '{}://{}'.format(scheme, netloc) def parse_netloc(netloc): # type: (str) -> Tuple[str, Optional[int]] """ Return the host-port pair from a netloc. """ url = build_url_from_netloc(netloc) parsed = urllib_parse.urlparse(url) return parsed.hostname, parsed.port def split_auth_from_netloc(netloc): """ Parse out and remove the auth information from a netloc. Returns: (netloc, (username, password)). 
""" if '@' not in netloc: return netloc, (None, None) # Split from the right because that's how urllib.parse.urlsplit() # behaves if more than one @ is present (which can be checked using # the password attribute of urlsplit()'s return value). auth, netloc = netloc.rsplit('@', 1) if ':' in auth: # Split from the left because that's how urllib.parse.urlsplit() # behaves if more than one : is present (which again can be checked # using the password attribute of the return value) user_pass = auth.split(':', 1) else: user_pass = auth, None user_pass = tuple( None if x is None else urllib_unquote(x) for x in user_pass ) return netloc, user_pass def redact_netloc(netloc): # type: (str) -> str """ Replace the sensitive data in a netloc with "****", if it exists. For example: - "user:pass@example.com" returns "user:****@example.com" - "accesstoken@example.com" returns "****@example.com" """ netloc, (user, password) = split_auth_from_netloc(netloc) if user is None: return netloc if password is None: user = '****' password = '' else: user = urllib_parse.quote(user) password = ':****' return '{user}{password}@{netloc}'.format(user=user, password=password, netloc=netloc) def _transform_url(url, transform_netloc): """Transform and replace netloc in a url. transform_netloc is a function taking the netloc and returning a tuple. The first element of this tuple is the new netloc. The entire tuple is returned. Returns a tuple containing the transformed url as item 0 and the original tuple returned by transform_netloc as item 1. """ purl = urllib_parse.urlsplit(url) netloc_tuple = transform_netloc(purl.netloc) # stripped url url_pieces = ( purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment ) surl = urllib_parse.urlunsplit(url_pieces) return surl, netloc_tuple def _get_netloc(netloc): return split_auth_from_netloc(netloc) def _redact_netloc(netloc): return (redact_netloc(netloc),) def split_auth_netloc_from_url(url): # type: (str) -> Tuple[str, str, Tuple[str, str]] """ Parse a url into separate netloc, auth, and url with no auth. Returns: (url_without_auth, netloc, (username, password)) """ url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) return url_without_auth, netloc, auth def remove_auth_from_url(url): # type: (str) -> str """Return a copy of url with 'username:password@' removed.""" # username/pass params are passed to subversion through flags # and are not recognized in the url. return _transform_url(url, _get_netloc)[0] def redact_auth_from_url(url): # type: (str) -> str """Replace the password in a given url with ****.""" return _transform_url(url, _redact_netloc)[0] class HiddenText(object): def __init__( self, secret, # type: str redacted, # type: str ): # type: (...) -> None self.secret = secret self.redacted = redacted def __repr__(self): # type: (...) -> str return '<HiddenText {!r}>'.format(str(self)) def __str__(self): # type: (...) -> str return self.redacted # This is useful for testing. def __eq__(self, other): # type: (Any) -> bool if type(self) != type(other): return False # The string being used for redaction doesn't also have to match, # just the raw, original string. return (self.secret == other.secret) # We need to provide an explicit __ne__ implementation for Python 2. # TODO: remove this when we drop PY2 support. 
def __ne__(self, other): # type: (Any) -> bool return not self == other def hide_value(value): # type: (str) -> HiddenText return HiddenText(value, redacted='****') def hide_url(url): # type: (str) -> HiddenText redacted = redact_auth_from_url(url) return HiddenText(url, redacted=redacted) def protect_pip_from_modification_on_windows(modifying_pip): # type: (bool) -> None """Protection of pip.exe from modification on Windows On Windows, any operation modifying pip should be run as: python -m pip ... """ pip_names = [ "pip.exe", "pip{}.exe".format(sys.version_info[0]), "pip{}.{}.exe".format(*sys.version_info[:2]) ] # See https://github.com/pypa/pip/issues/1299 for more discussion should_show_use_python_msg = ( modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names ) if should_show_use_python_msg: new_command = [ sys.executable, "-m", "pip" ] + sys.argv[1:] raise CommandError( 'To modify pip, please run the following command:\n{}' .format(" ".join(new_command)) ) def is_console_interactive(): # type: () -> bool """Is this console interactive? """ return sys.stdin is not None and sys.stdin.isatty() def hash_file(path, blocksize=1 << 20): # type: (Text, int) -> Tuple[Any, int] """Return (hash, length) for path using hashlib.sha256() """ h = hashlib.sha256() length = 0 with open(path, 'rb') as f: for block in read_chunks(f, size=blocksize): length += len(block) h.update(block) return h, length def is_wheel_installed(): """ Return whether the wheel package is installed. """ try: import wheel # noqa: F401 except ImportError: return False return True def pairwise(iterable): # type: (Iterable[Any]) -> Iterator[Tuple[Any, Any]] """ Return paired elements. For example: s -> (s0, s1), (s2, s3), (s4, s5), ... """ iterable = iter(iterable) return zip_longest(iterable, iterable) def partition( pred, # type: Callable[[T], bool] iterable, # type: Iterable[T] ): # type: (...) -> Tuple[Iterable[T], Iterable[T]] """ Use a predicate to partition entries into false entries and true entries, like partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 """ t1, t2 = tee(iterable) return filterfalse(pred, t1), filter(pred, t2)
Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/misc.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/misc.py", "repo_id": "Django-locallibrary", "token_count": 11160 }
16
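Demonstration of a few of the pure helpers defined in the misc module above; the expected outputs follow directly from the implementations shown.

from pip._internal.utils.misc import (
    build_netloc,
    format_size,
    redact_auth_from_url,
    split_auth_from_netloc,
)

print(format_size(20500000))                     # '20.5 MB'
print(build_netloc('::1', 8080))                 # '[::1]:8080'
print(split_auth_from_netloc('user:pass@example.com'))
# ('example.com', ('user', 'pass'))
print(redact_auth_from_url('https://user:secret@example.com/simple/'))
# 'https://user:****@example.com/simple/'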
import types import functools import zlib from pip._vendor.requests.adapters import HTTPAdapter from .controller import CacheController from .cache import DictCache from .filewrapper import CallbackFileWrapper class CacheControlAdapter(HTTPAdapter): invalidating_methods = {"PUT", "DELETE"} def __init__( self, cache=None, cache_etags=True, controller_class=None, serializer=None, heuristic=None, cacheable_methods=None, *args, **kw ): super(CacheControlAdapter, self).__init__(*args, **kw) self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) controller_factory = controller_class or CacheController self.controller = controller_factory( self.cache, cache_etags=cache_etags, serializer=serializer ) def send(self, request, cacheable_methods=None, **kw): """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. """ cacheable = cacheable_methods or self.cacheable_methods if request.method in cacheable: try: cached_response = self.controller.cached_request(request) except zlib.error: cached_response = None if cached_response: return self.build_response(request, cached_response, from_cache=True) # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) resp = super(CacheControlAdapter, self).send(request, **kw) return resp def build_response( self, request, response, from_cache=False, cacheable_methods=None ): """ Build a response by making a request or using the cache. This will end up calling send and returning a potentially cached response """ cacheable = cacheable_methods or self.cacheable_methods if not from_cache and request.method in cacheable: # Check for any heuristics that might update headers # before trying to cache. if self.heuristic: response = self.heuristic.apply(response) # apply any expiration heuristics if response.status == 304: # We must have sent an ETag request. This could mean # that we've been expired already or that we simply # have an etag. In either case, we want to try and # update the cache if that is the case. cached_response = self.controller.update_cached_response( request, response ) if cached_response is not response: from_cache = True # We are done with the server response, read a # possible response body (compliant servers will # not return one, but we cannot be 100% sure) and # release the connection back to the pool. response.read(decode_content=False) response.release_conn() response = cached_response # We always cache the 301 responses elif response.status == 301: self.controller.cache_response(request, response) else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. response._fp = CallbackFileWrapper( response._fp, functools.partial( self.controller.cache_response, request, response ), ) if response.chunked: super_update_chunk_length = response._update_chunk_length def _update_chunk_length(self): super_update_chunk_length() if self.chunk_left == 0: self._fp._close() response._update_chunk_length = types.MethodType( _update_chunk_length, response ) resp = super(CacheControlAdapter, self).build_response(request, response) # See if we should invalidate the cache. 
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the response a from_cache attribute so callers can tell whether
        # it was served from the cache.
        resp.from_cache = from_cache

        return resp

    def close(self):
        self.cache.close()
        super(CacheControlAdapter, self).close()
Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/adapter.py", "repo_id": "Django-locallibrary", "token_count": 2182 }
17
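Usage note for CacheControlAdapter above: pip mounts it inside PipSession (see the network/session.py module earlier), but the adapter can also be used on its own. A stand-alone sketch using the vendored requests and the package's FileCache:

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches.file_cache import FileCache

session = requests.Session()
session.mount('https://', CacheControlAdapter(cache=FileCache('.webcache')))

first = session.get('https://example.com/')    # fetched from the network
second = session.get('https://example.com/')   # may be served from the cache,
                                               # depending on the response headers
print(getattr(second, 'from_cache', False))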
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import ProbingState from .charsetprober import CharSetProber class CharSetGroupProber(CharSetProber): def __init__(self, lang_filter=None): super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) self._active_num = 0 self.probers = [] self._best_guess_prober = None def reset(self): super(CharSetGroupProber, self).reset() self._active_num = 0 for prober in self.probers: if prober: prober.reset() prober.active = True self._active_num += 1 self._best_guess_prober = None @property def charset_name(self): if not self._best_guess_prober: self.get_confidence() if not self._best_guess_prober: return None return self._best_guess_prober.charset_name @property def language(self): if not self._best_guess_prober: self.get_confidence() if not self._best_guess_prober: return None return self._best_guess_prober.language def feed(self, byte_str): for prober in self.probers: if not prober: continue if not prober.active: continue state = prober.feed(byte_str) if not state: continue if state == ProbingState.FOUND_IT: self._best_guess_prober = prober return self.state elif state == ProbingState.NOT_ME: prober.active = False self._active_num -= 1 if self._active_num <= 0: self._state = ProbingState.NOT_ME return self.state return self.state def get_confidence(self): state = self.state if state == ProbingState.FOUND_IT: return 0.99 elif state == ProbingState.NOT_ME: return 0.01 best_conf = 0.0 self._best_guess_prober = None for prober in self.probers: if not prober: continue if not prober.active: self.logger.debug('%s not active', prober.charset_name) continue conf = prober.get_confidence() self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) if best_conf < conf: best_conf = conf self._best_guess_prober = prober if not self._best_guess_prober: return 0.0 return best_conf
Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/charsetgroupprober.py", "repo_id": "Django-locallibrary", "token_count": 1642 }
18
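CharSetGroupProber fans each chunk of bytes out to its active sub-probers, deactivates any that report NOT_ME, and keeps the highest-confidence survivor as the group's guess. A short sketch of driving a concrete group prober follows; MBCSGroupProber is assumed from chardet's own codebase, and the sample string is arbitrary.

# Hedged sketch -- drives a concrete group prober built on the class above.
from chardet.mbcsgroupprober import MBCSGroupProber

prober = MBCSGroupProber()
data = 'こんにちは'.encode('shift_jis')

prober.feed(data)                  # delegates to every active sub-prober
print(prober.charset_name)         # best guess across the group
print(prober.get_confidence())     # highest surviving sub-prober confidence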
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import MachineState # BIG5 BIG5_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 4,4,4,4,4,4,4,4, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 4,3,3,3,3,3,3,3, # a0 - a7 3,3,3,3,3,3,3,3, # a8 - af 3,3,3,3,3,3,3,3, # b0 - b7 3,3,3,3,3,3,3,3, # b8 - bf 3,3,3,3,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) BIG5_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 ) BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) BIG5_SM_MODEL = {'class_table': BIG5_CLS, 'class_factor': 5, 'state_table': BIG5_ST, 'char_len_table': BIG5_CHAR_LEN_TABLE, 'name': 'Big5'} # CP949 CP949_CLS = ( 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff ) CP949_ST = ( #cls= 0 1 2 3 4 5 6 7 8 9 # previous state = 
MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 ) CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) CP949_SM_MODEL = {'class_table': CP949_CLS, 'class_factor': 10, 'state_table': CP949_ST, 'char_len_table': CP949_CHAR_LEN_TABLE, 'name': 'CP949'} # EUC-JP EUCJP_CLS = ( 4,4,4,4,4,4,4,4, # 00 - 07 4,4,4,4,4,4,5,5, # 08 - 0f 4,4,4,4,4,4,4,4, # 10 - 17 4,4,4,5,4,4,4,4, # 18 - 1f 4,4,4,4,4,4,4,4, # 20 - 27 4,4,4,4,4,4,4,4, # 28 - 2f 4,4,4,4,4,4,4,4, # 30 - 37 4,4,4,4,4,4,4,4, # 38 - 3f 4,4,4,4,4,4,4,4, # 40 - 47 4,4,4,4,4,4,4,4, # 48 - 4f 4,4,4,4,4,4,4,4, # 50 - 57 4,4,4,4,4,4,4,4, # 58 - 5f 4,4,4,4,4,4,4,4, # 60 - 67 4,4,4,4,4,4,4,4, # 68 - 6f 4,4,4,4,4,4,4,4, # 70 - 77 4,4,4,4,4,4,4,4, # 78 - 7f 5,5,5,5,5,5,5,5, # 80 - 87 5,5,5,5,5,5,1,3, # 88 - 8f 5,5,5,5,5,5,5,5, # 90 - 97 5,5,5,5,5,5,5,5, # 98 - 9f 5,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,0,5 # f8 - ff ) EUCJP_ST = ( 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 ) EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, 'class_factor': 6, 'state_table': EUCJP_ST, 'char_len_table': EUCJP_CHAR_LEN_TABLE, 'name': 'EUC-JP'} # EUC-KR EUCKR_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 
1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,3,3,3, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,3,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,0 # f8 - ff ) EUCKR_ST = ( MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f ) EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, 'class_factor': 4, 'state_table': EUCKR_ST, 'char_len_table': EUCKR_CHAR_LEN_TABLE, 'name': 'EUC-KR'} # EUC-TW EUCTW_CLS = ( 2,2,2,2,2,2,2,2, # 00 - 07 2,2,2,2,2,2,0,0, # 08 - 0f 2,2,2,2,2,2,2,2, # 10 - 17 2,2,2,0,2,2,2,2, # 18 - 1f 2,2,2,2,2,2,2,2, # 20 - 27 2,2,2,2,2,2,2,2, # 28 - 2f 2,2,2,2,2,2,2,2, # 30 - 37 2,2,2,2,2,2,2,2, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,2, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,6,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,3,4,4,4,4,4,4, # a0 - a7 5,5,1,1,1,1,1,1, # a8 - af 1,1,1,1,1,1,1,1, # b0 - b7 1,1,1,1,1,1,1,1, # b8 - bf 1,1,3,1,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) EUCTW_ST = ( MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, 'class_factor': 7, 'state_table': EUCTW_ST, 'char_len_table': EUCTW_CHAR_LEN_TABLE, 'name': 'x-euc-tw'} # GB2312 GB2312_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 3,3,3,3,3,3,3,3, # 30 - 37 3,3,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 
67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,4, # 78 - 7f 5,6,6,6,6,6,6,6, # 80 - 87 6,6,6,6,6,6,6,6, # 88 - 8f 6,6,6,6,6,6,6,6, # 90 - 97 6,6,6,6,6,6,6,6, # 98 - 9f 6,6,6,6,6,6,6,6, # a0 - a7 6,6,6,6,6,6,6,6, # a8 - af 6,6,6,6,6,6,6,6, # b0 - b7 6,6,6,6,6,6,6,6, # b8 - bf 6,6,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 6,6,6,6,6,6,6,6, # e0 - e7 6,6,6,6,6,6,6,6, # e8 - ef 6,6,6,6,6,6,6,6, # f0 - f7 6,6,6,6,6,6,6,0 # f8 - ff ) GB2312_ST = ( MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) # To be accurate, the length of class 6 can be either 2 or 4. # But it is not necessary to discriminate between the two since # it is used for frequency analysis only, and we are validating # each code range there as well. So it is safe to set it to be # 2 here. GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) GB2312_SM_MODEL = {'class_table': GB2312_CLS, 'class_factor': 7, 'state_table': GB2312_ST, 'char_len_table': GB2312_CHAR_LEN_TABLE, 'name': 'GB2312'} # Shift_JIS SJIS_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 3,3,3,3,3,2,2,3, # 80 - 87 3,3,3,3,3,3,3,3, # 88 - 8f 3,3,3,3,3,3,3,3, # 90 - 97 3,3,3,3,3,3,3,3, # 98 - 9f #0xa0 is illegal in sjis encoding, but some pages does #contain such byte. We need to be more error forgiven. 
2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,4,4,4, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,0,0,0) # f8 - ff SJIS_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 ) SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) SJIS_SM_MODEL = {'class_table': SJIS_CLS, 'class_factor': 6, 'state_table': SJIS_ST, 'char_len_table': SJIS_CHAR_LEN_TABLE, 'name': 'Shift_JIS'} # UCS2-BE UCS2BE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) UCS2BE_ST = ( 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, 'class_factor': 6, 'state_table': UCS2BE_ST, 'char_len_table': UCS2BE_CHAR_LEN_TABLE, 'name': 'UTF-16BE'} # UCS2-LE UCS2LE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) 
UCS2LE_ST = ( 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, 'class_factor': 6, 'state_table': UCS2LE_ST, 'char_len_table': UCS2LE_CHAR_LEN_TABLE, 'name': 'UTF-16LE'} # UTF-8 UTF8_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 2,2,2,2,3,3,3,3, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 5,5,5,5,5,5,5,5, # a0 - a7 5,5,5,5,5,5,5,5, # a8 - af 5,5,5,5,5,5,5,5, # b0 - b7 5,5,5,5,5,5,5,5, # b8 - bf 0,0,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 7,8,8,8,8,8,8,8, # e0 - e7 8,8,8,8,8,9,8,8, # e8 - ef 10,11,11,11,11,11,11,11, # f0 - f7 12,13,13,13,14,15,0,0 # f8 - ff ) UTF8_ST = ( MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 9, 11, 8, 7, 6, 5, 4, 3,#08-0f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf ) UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) UTF8_SM_MODEL = {'class_table': UTF8_CLS, 'class_factor': 16, 'state_table': UTF8_ST, 'char_len_table': UTF8_CHAR_LEN_TABLE, 'name': 'UTF-8'}
Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/mbcssm.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/mbcssm.py", "repo_id": "Django-locallibrary", "token_count": 14532 }
19
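Each *_SM_MODEL dict above bundles a byte-class table, a state-transition table, and per-class character lengths for one multi-byte encoding. A hedged sketch of how such a model is typically consumed follows; it assumes chardet's CodingStateMachine and its next_state(byte) interface, and the sample text is arbitrary.

# Hedged sketch -- walking a byte stream through one of the state machines above.
from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.mbcssm import SJIS_SM_MODEL

sm = CodingStateMachine(SJIS_SM_MODEL)
data = 'テスト'.encode('shift_jis')

for byte in data:
    state = sm.next_state(byte)    # advance the state table one byte at a time
    if state == MachineState.ERROR:
        print('byte stream is not valid Shift_JIS')
        break
else:
    print('byte stream is consistent with', SJIS_SM_MODEL['name'])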
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys

from .ansitowin32 import AnsiToWin32


orig_stdout = None
orig_stderr = None

wrapped_stdout = None
wrapped_stderr = None

atexit_done = False


def reset_all():
    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
        AnsiToWin32(orig_stdout).reset_all()


def init(autoreset=False, convert=None, strip=None, wrap=True):

    if not wrap and any([autoreset, convert, strip]):
        raise ValueError('wrap=False conflicts with any other arg=True')

    global wrapped_stdout, wrapped_stderr
    global orig_stdout, orig_stderr

    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    if sys.stdout is None:
        wrapped_stdout = None
    else:
        sys.stdout = wrapped_stdout = \
            wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
    if sys.stderr is None:
        wrapped_stderr = None
    else:
        sys.stderr = wrapped_stderr = \
            wrap_stream(orig_stderr, convert, strip, autoreset, wrap)

    global atexit_done
    if not atexit_done:
        atexit.register(reset_all)
        atexit_done = True


def deinit():
    if orig_stdout is not None:
        sys.stdout = orig_stdout
    if orig_stderr is not None:
        sys.stderr = orig_stderr


@contextlib.contextmanager
def colorama_text(*args, **kwargs):
    init(*args, **kwargs)
    try:
        yield
    finally:
        deinit()


def reinit():
    if wrapped_stdout is not None:
        sys.stdout = wrapped_stdout
    if wrapped_stderr is not None:
        sys.stderr = wrapped_stderr


def wrap_stream(stream, convert, strip, autoreset, wrap):
    if wrap:
        wrapper = AnsiToWin32(stream,
                              convert=convert, strip=strip, autoreset=autoreset)
        if wrapper.should_wrap():
            stream = wrapper.stream
    return stream
Django-locallibrary/env/Lib/site-packages/pip/_vendor/colorama/initialise.py/0
{ "file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/colorama/initialise.py", "repo_id": "Django-locallibrary", "token_count": 804 }
20
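The module above swaps sys.stdout and sys.stderr for ANSI-aware wrappers and registers an atexit reset. A minimal usage sketch through colorama's public API follows; the printed strings are placeholders.

# Hedged sketch -- typical use of the initialise module via colorama's public API.
from colorama import init, deinit, colorama_text, Fore, Style

init(autoreset=True)               # wrap stdout/stderr; reset colour after each print
print(Fore.GREEN + 'cached')
deinit()                           # restore the original streams

# Or scoped, using the context manager defined above:
with colorama_text():
    print(Fore.RED + 'error' + Style.RESET_ALL)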