Dataset schema (one row per source file; "nullable" marks columns that may be empty):

| column | dtype | range / classes |
|---|---|---|
| hexsha | string | lengths 40 to 40 |
| size | int64 | 2 to 1.05M |
| ext | string | 9 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4 to 193 |
| max_stars_repo_name | string | lengths 6 to 109 |
| max_stars_repo_head_hexsha | string | lengths 40 to 78 |
| max_stars_repo_licenses | sequence | |
| max_stars_count | int64 | 1 to 36.6k, nullable |
| max_stars_repo_stars_event_min_datetime | string | lengths 24 to 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | lengths 24 to 24, nullable |
| max_issues_repo_path | string | lengths 4 to 193 |
| max_issues_repo_name | string | lengths 6 to 109 |
| max_issues_repo_head_hexsha | string | lengths 40 to 78 |
| max_issues_repo_licenses | sequence | |
| max_issues_count | int64 | 1 to 29.8k, nullable |
| max_issues_repo_issues_event_min_datetime | string | lengths 24 to 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | lengths 24 to 24, nullable |
| max_forks_repo_path | string | lengths 4 to 193 |
| max_forks_repo_name | string | lengths 6 to 109 |
| max_forks_repo_head_hexsha | string | lengths 40 to 78 |
| max_forks_repo_licenses | sequence | |
| max_forks_count | int64 | 1 to 11.2k, nullable |
| max_forks_repo_forks_event_min_datetime | string | lengths 24 to 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | lengths 24 to 24, nullable |
| content | string | lengths 2 to 1.05M |
| avg_line_length | float64 | 1 to 404k |
| max_line_length | int64 | 1 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
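A minimal sketch (not one of the dataset rows) of how rows with this schema can be consumed as plain dicts; the helper and the sample row below are hypothetical, only the field names come from the schema above.

def _keep_row(row, allowed_licenses=("MIT", "Apache-2.0"), max_size=1000000):
    # Keep rows whose declared licenses are all allowed and whose size fits the cap.
    licenses = row.get("max_stars_repo_licenses") or []
    return bool(licenses) and set(licenses).issubset(allowed_licenses) and row["size"] <= max_size

_sample_row = {
    "hexsha": "0" * 40,
    "size": 254,
    "ext": "py",
    "lang": "Python",
    "max_stars_repo_path": "pkg/__init__.py",
    "max_stars_repo_licenses": ["MIT"],
    "max_stars_count": None,
    "content": "print('hello')\n",
}
print(_keep_row(_sample_row))  # True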
hexsha: 73ff7bd7349b1b6ec7c593fdf7678510edc5171f | size: 7,931 | ext: py | lang: Python
path: allocation/tests/conftest.py
repo: iivoraitahila/tilavarauspalvelu-core @ ed809dd74dffc1e21141564a005fcdf138a49448 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 90 (2020-11-13T07:42:32.000Z to 2022-03-29T08:54:20.000Z) | max_forks_count: 8 (2021-02-10T11:31:22.000Z to 2022-01-28T14:33:47.000Z)
content:

import datetime
from typing import Dict
import pytest
from django.utils import timezone
from applications.models import (
Application,
ApplicationEvent,
ApplicationEventSchedule,
ApplicationEventScheduleResult,
ApplicationRound,
ApplicationRoundBasket,
EventReservationUnit,
)
from reservation_units.models import Purpose, ReservationUnit
from spaces.models import Space
@pytest.fixture(autouse=True)
def setup_audit_log(settings):
settings.AUDIT_LOGGING_ENABLED = False
@pytest.fixture(autouse=True)
def disable_hauki_api(settings):
settings.HAUKI_API_URL = None
def get_default_start() -> datetime.date:
return datetime.date(year=2020, month=1, day=1)
def get_default_end() -> datetime.date:
return datetime.date(year=2020, month=1, day=31)
def get_default_start_datetime() -> datetime.datetime:
return timezone.datetime.combine(
get_default_start(), datetime.datetime.min.time()
).astimezone()
def get_default_end_datetime() -> datetime.datetime:
return timezone.datetime.combine(
get_default_end(), datetime.datetime.max.time()
).astimezone()
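# The two helpers above return timezone-aware datetimes spanning the whole default
# period: 2020-01-01 00:00:00 through 2020-01-31 23:59:59.999999, in the local timezone.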
@pytest.fixture
def default_application_round() -> ApplicationRound:
return ApplicationRound.objects.create(
application_period_begin=get_default_start_datetime(),
application_period_end=get_default_end_datetime(),
reservation_period_begin=get_default_start(),
reservation_period_end=get_default_end(),
public_display_begin=get_default_start_datetime(),
public_display_end=get_default_end_datetime(),
)
@pytest.fixture
def space_for_15_persons():
return Space.objects.create(name="Space", max_persons=15)
@pytest.fixture
def space_for_5_persons():
return Space.objects.create(name="Space", max_persons=5)
@pytest.fixture
def reservation_unit(space_for_15_persons) -> ReservationUnit:
reservation_unit = ReservationUnit.objects.create(
name_en="Test reservation unit", require_introduction=False
)
reservation_unit.spaces.set([space_for_15_persons])
return reservation_unit
@pytest.fixture
def second_reservation_unit(space_for_5_persons):
reservation_unit = ReservationUnit.objects.create(
name_en="Second test reservation unit",
require_introduction=False,
)
reservation_unit.spaces.set([space_for_5_persons])
return reservation_unit
@pytest.fixture
def application_round_with_reservation_units(
reservation_unit, default_application_round
) -> ApplicationRound:
default_application_round.reservation_units.set([reservation_unit])
return default_application_round
@pytest.fixture
def minimal_application(default_application_round) -> Application:
return Application.objects.create(application_round_id=default_application_round.id)
@pytest.fixture
def application_with_reservation_units(
default_application_round,
) -> Application:
return Application.objects.create(application_round_id=default_application_round.id)
@pytest.fixture
def application_with_application_events(
default_application_round,
) -> Application:
return Application.objects.create(application_round_id=default_application_round.id)
@pytest.fixture
def recurring_application_event(
application_with_reservation_units, purpose
) -> ApplicationEvent:
return ApplicationEvent.objects.create(
application=application_with_reservation_units,
num_persons=10,
min_duration=datetime.timedelta(hours=1),
max_duration=datetime.timedelta(hours=2),
name="Football",
events_per_week=2,
begin=datetime.date(year=2020, month=1, day=1),
end=datetime.date(year=2020, month=2, day=28),
biweekly=False,
purpose=purpose,
)
@pytest.fixture
def scheduled_for_monday(recurring_application_event) -> ApplicationEventSchedule:
return ApplicationEventSchedule.objects.create(
day=0, begin="10:00", end="12:00", application_event=recurring_application_event
)
@pytest.fixture
def result_scheduled_for_monday(scheduled_for_monday, reservation_unit):
return ApplicationEventScheduleResult.objects.create(
application_event_schedule=scheduled_for_monday,
accepted=True,
allocated_reservation_unit=reservation_unit,
allocated_duration="01:00",
allocated_begin="10:00",
allocated_end="11:00",
allocated_day=0,
)
@pytest.fixture
def matching_event_reservation_unit(
recurring_application_event, reservation_unit
) -> EventReservationUnit:
return EventReservationUnit.objects.create(
priority=100,
application_event=recurring_application_event,
reservation_unit=reservation_unit,
)
@pytest.fixture
def not_matching_event_reservation_unit(
recurring_application_event, second_reservation_unit
) -> EventReservationUnit:
return EventReservationUnit.objects.create(
priority=100,
application_event=recurring_application_event,
reservation_unit=second_reservation_unit,
)
@pytest.fixture
def purpose() -> Purpose:
return Purpose.objects.create(name="Exercise")
@pytest.fixture
def purpose2() -> Purpose:
return Purpose.objects.create(name="Playing sports")
@pytest.fixture
def multiple_applications(
application_round_with_reservation_units, request, reservation_unit, purpose
) -> Dict[str, list]:
applications = []
created_events = []
schedules = []
for application in request.param["applications"]:
created_application = Application.objects.create(
application_round_id=application_round_with_reservation_units.id
)
applications.append(created_application)
for event in application["events"]:
created_event = ApplicationEvent.objects.create(
application=created_application,
num_persons=15,
min_duration=datetime.timedelta(minutes=event["duration"]),
max_duration=datetime.timedelta(minutes=event["duration"]),
name="Football",
events_per_week=event["events_per_week"],
begin=datetime.date(year=2020, month=1, day=1),
end=datetime.date(year=2020, month=2, day=28),
biweekly=False,
purpose=purpose,
)
for schedule in event["schedules"]:
created_schedule = ApplicationEventSchedule.objects.create(
day=schedule["day"],
begin=schedule["start"] if "start" in schedule else "10:00",
end=schedule["end"] if "end" in schedule else "22:00",
application_event=created_event,
)
schedules.append(created_schedule)
created_events.append(created_event)
EventReservationUnit.objects.create(
priority=100,
application_event=created_event,
reservation_unit=reservation_unit,
)
return {
"applications": applications,
"created_events": created_events,
"schedules": schedules,
}
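# Usage sketch (illustrative; the parameter payload below is hypothetical): tests drive
# this fixture through indirect parametrization, e.g.
#
#     @pytest.mark.parametrize(
#         "multiple_applications",
#         [{"applications": [{"events": [{"duration": 60, "events_per_week": 1,
#                                         "schedules": [{"day": 0}]}]}]}],
#         indirect=True,
#     )
#     def test_allocation(multiple_applications):
#         assert len(multiple_applications["applications"]) == 1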
@pytest.fixture
def application_round_basket_one(
default_application_round, purpose
) -> ApplicationRoundBasket:
basket = ApplicationRoundBasket.objects.create(
name="Basket with order number one",
application_round=default_application_round,
order_number=1,
customer_type=[],
)
basket.purposes.set([purpose])
return basket
@pytest.fixture
def application_round_basket_two(
default_application_round, purpose
) -> ApplicationRoundBasket:
basket = ApplicationRoundBasket.objects.create(
name="Basket with order number two",
application_round=default_application_round,
order_number=2,
customer_type=[],
)
basket.purposes.set([purpose])
return basket
avg_line_length: 30.041667 | max_line_length: 88 | alphanum_fraction: 0.718573

hexsha: 73ff8fa69e195f48ad5a8a312549d714ad03b5a7 | size: 12,717 | ext: py | lang: Python
path: autotest/ogr/ogr_rfc35_mem.py | licenses: ["MIT"]
max_stars_repo: chambbj/gdal @ 3d56aecb5b8e9890dae8f560acd099992e707d12 | max_stars_count: 1 (2015-02-16T16:51:38.000Z to 2015-02-16T16:51:38.000Z)
max_issues_repo / max_forks_repo: theduckylittle/gdal @ 61be261cae524582ba28bceebb027cc1e967e0ab | max_issues_count: null | max_forks_count: null
content:

#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test RFC35 for Memory driver
# Author: Even Rouault <even dot rouault at mines dash paris dot org>
#
###############################################################################
# Copyright (c) 2011, Even Rouault <even dot rouault at mines dash paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
import string
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import gdal
###############################################################################
# Initiate the test file
def ogr_rfc35_mem_1():
gdaltest.rfc35_mem_ds = ogr.GetDriverByName('Memory').CreateDataSource('rfc35_test')
lyr = gdaltest.rfc35_mem_ds.CreateLayer('rfc35_test')
lyr.ReorderFields([])
fd = ogr.FieldDefn('foo5', ogr.OFTString)
fd.SetWidth(5)
lyr.CreateField(fd)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetField(0, 'foo0')
lyr.CreateFeature(feat)
feat = None
fd = ogr.FieldDefn('bar10', ogr.OFTString)
fd.SetWidth(10)
lyr.CreateField(fd)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetField(0, 'foo1')
feat.SetField(1, 'bar1')
lyr.CreateFeature(feat)
feat = None
fd = ogr.FieldDefn('baz15', ogr.OFTString)
fd.SetWidth(15)
lyr.CreateField(fd)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetField(0, 'foo2')
feat.SetField(1, 'bar2_01234')
feat.SetField(2, 'baz2_0123456789')
lyr.CreateFeature(feat)
feat = None
fd = ogr.FieldDefn('baw20', ogr.OFTString)
fd.SetWidth(20)
lyr.CreateField(fd)
return 'success'
###############################################################################
# Test ReorderField()
def Truncate(val, lyr_defn, fieldname):
#if val is None:
# return val
#return val[0:lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex(fieldname)).GetWidth()]
# Mem driver doesn't actually truncate
return val
def CheckFeatures(lyr, foo = 'foo5', bar = 'bar10', baz = 'baz15', baw = 'baw20'):
expected_values = [
[ 'foo0', None, None, None ],
[ 'foo1', 'bar1', None, None ],
[ 'foo2', 'bar2_01234', 'baz2_0123456789', None ],
[ 'foo3', 'bar3_01234', 'baz3_0123456789', 'baw3_012345678901234' ]
]
lyr_defn = lyr.GetLayerDefn()
lyr.ResetReading()
feat = lyr.GetNextFeature()
i = 0
while feat is not None:
if (foo is not None and feat.GetField(foo) != Truncate(expected_values[i][0], lyr_defn, foo)) or \
(bar is not None and feat.GetField(bar) != Truncate(expected_values[i][1], lyr_defn, bar)) or \
(baz is not None and feat.GetField(baz) != Truncate(expected_values[i][2], lyr_defn, baz)) or \
(baw is not None and feat.GetField(baw) != Truncate(expected_values[i][3], lyr_defn, baw)):
feat.DumpReadable()
return 'fail'
feat = lyr.GetNextFeature()
i = i + 1
return 'success'
def CheckColumnOrder(lyr, expected_order):
lyr_defn = lyr.GetLayerDefn()
for i in range(len(expected_order)):
if lyr_defn.GetFieldDefn(i).GetName() != expected_order[i]:
return 'fail'
return 'success'
def Check(lyr, expected_order):
ret = CheckColumnOrder(lyr, expected_order)
if ret != 'success':
return ret
ret = CheckFeatures(lyr)
if ret != 'success':
return ret
return 'success'
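# Illustrative helper (not called by the test list below; the layer and field names are
# hypothetical): the RFC35 layer-editing calls exercised by these tests, in isolation.
def _rfc35_api_sketch():
    ds = ogr.GetDriverByName('Memory').CreateDataSource('sketch')
    lyr = ds.CreateLayer('sketch')
    lyr.CreateField(ogr.FieldDefn('a', ogr.OFTString))
    lyr.CreateField(ogr.FieldDefn('b', ogr.OFTString))
    lyr.ReorderFields([1, 0])  # 'b' now precedes 'a'
    wider = ogr.FieldDefn('a25', ogr.OFTString)
    wider.SetWidth(25)
    lyr.AlterFieldDefn(lyr.GetLayerDefn().GetFieldIndex('a'), wider, ogr.ALTER_ALL_FLAG)
    lyr.DeleteField(lyr.GetLayerDefn().GetFieldIndex('b'))
    return ds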
def ogr_rfc35_mem_2():
lyr = gdaltest.rfc35_mem_ds.GetLayer(0)
if lyr.TestCapability(ogr.OLCReorderFields) != 1:
return 'fail'
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetField(0, 'foo3')
feat.SetField(1, 'bar3_01234')
feat.SetField(2, 'baz3_0123456789')
feat.SetField(3, 'baw3_012345678901234')
lyr.CreateFeature(feat)
feat = None
if lyr.ReorderField(1,3) != 0:
return 'fail'
ret = Check(lyr, ['foo5', 'baz15', 'baw20', 'bar10'])
if ret != 'success':
return ret
lyr.ReorderField(3,1)
ret = Check(lyr, ['foo5', 'bar10', 'baz15', 'baw20'])
if ret != 'success':
return ret
lyr.ReorderField(0,2)
ret = Check(lyr, ['bar10', 'baz15', 'foo5', 'baw20'])
if ret != 'success':
return ret
lyr.ReorderField(2,0)
ret = Check(lyr, ['foo5', 'bar10', 'baz15', 'baw20'])
if ret != 'success':
return ret
lyr.ReorderField(0,1)
ret = Check(lyr, ['bar10', 'foo5', 'baz15', 'baw20'])
if ret != 'success':
return ret
lyr.ReorderField(1,0)
ret = Check(lyr, ['foo5', 'bar10', 'baz15', 'baw20'])
if ret != 'success':
return ret
lyr.ReorderFields([3,2,1,0])
ret = Check(lyr, ['baw20', 'baz15', 'bar10', 'foo5'])
if ret != 'success':
return ret
lyr.ReorderFields([3,2,1,0])
ret = Check(lyr, ['foo5', 'bar10', 'baz15', 'baw20'])
if ret != 'success':
return ret
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = lyr.ReorderFields([0,0,0,0])
gdal.PopErrorHandler()
if ret == 0:
return 'fail'
return 'success'
###############################################################################
# Test AlterFieldDefn() for change of name and width
def ogr_rfc35_mem_3():
lyr = gdaltest.rfc35_mem_ds.GetLayer(0)
fd = ogr.FieldDefn("baz25", ogr.OFTString)
fd.SetWidth(25)
lyr_defn = lyr.GetLayerDefn()
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = lyr.AlterFieldDefn(-1, fd, ogr.ALTER_ALL_FLAG)
gdal.PopErrorHandler()
if ret == 0:
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = lyr.AlterFieldDefn(lyr_defn.GetFieldCount(), fd, ogr.ALTER_ALL_FLAG)
gdal.PopErrorHandler()
if ret == 0:
return 'fail'
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("baz15"), fd, ogr.ALTER_ALL_FLAG)
expected_values = [
[ 'foo0', None, None, None ],
[ 'foo1', 'bar1', None, None ],
[ 'foo2', 'bar2_01234', 'baz2_0123456789', None ],
[ 'foo3', 'bar3_01234', 'baz3_0123456789', 'baw3_012345678901234' ]
]
ret = CheckFeatures(lyr, baz = 'baz25')
if ret != 'success':
return ret
fd = ogr.FieldDefn("baz5", ogr.OFTString)
fd.SetWidth(5)
lyr_defn = lyr.GetLayerDefn()
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("baz25"), fd, ogr.ALTER_ALL_FLAG)
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
lyr_defn = lyr.GetLayerDefn()
fld_defn = lyr_defn.GetFieldDefn(lyr_defn.GetFieldIndex('baz5'))
if fld_defn.GetWidth() != 5:
return 'fail'
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
return 'success'
###############################################################################
# Test AlterFieldDefn() for change of type
def ogr_rfc35_mem_4():
lyr = gdaltest.rfc35_mem_ds.GetLayer(0)
lyr_defn = lyr.GetLayerDefn()
if lyr.TestCapability(ogr.OLCAlterFieldDefn) != 1:
return 'fail'
fd = ogr.FieldDefn("intfield", ogr.OFTInteger)
lyr.CreateField(fd)
lyr.ReorderField(lyr_defn.GetFieldIndex("intfield"), 0)
lyr.ResetReading()
feat = lyr.GetNextFeature()
feat.SetField("intfield", 12345)
lyr.SetFeature(feat)
feat = None
fd.SetWidth(10)
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("intfield"), fd, ogr.ALTER_ALL_FLAG)
lyr.ResetReading()
feat = lyr.GetNextFeature()
if feat.GetField("intfield") != 12345:
return 'fail'
feat = None
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
fd.SetWidth(5)
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("intfield"), fd, ogr.ALTER_ALL_FLAG)
lyr.ResetReading()
feat = lyr.GetNextFeature()
if feat.GetField("intfield") != 12345:
return 'fail'
feat = None
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
fd.SetWidth(4)
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("intfield"), fd, ogr.ALTER_ALL_FLAG)
lyr.ResetReading()
feat = lyr.GetNextFeature()
#if feat.GetField("intfield") != 1234:
if feat.GetField("intfield") != 12345:
return 'fail'
feat = None
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
fd = ogr.FieldDefn("oldintfld", ogr.OFTString)
fd.SetWidth(15)
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("intfield"), fd, ogr.ALTER_ALL_FLAG)
lyr.ResetReading()
feat = lyr.GetNextFeature()
#if feat.GetField("oldintfld") != '1234':
if feat.GetField("oldintfld") != '12345':
return 'fail'
feat = None
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
lyr.DeleteField(lyr_defn.GetFieldIndex("oldintfld"))
fd = ogr.FieldDefn("intfield", ogr.OFTInteger)
fd.SetWidth(10)
if lyr.CreateField(fd) != 0:
return 'fail'
if lyr.ReorderField(lyr_defn.GetFieldIndex("intfield"), 0) != 0:
return 'fail'
lyr.ResetReading()
feat = lyr.GetNextFeature()
feat.SetField("intfield", 98765)
if lyr.SetFeature(feat) != 0:
return 'fail'
feat = None
fd = ogr.FieldDefn("oldintfld", ogr.OFTString)
fd.SetWidth(6)
lyr.AlterFieldDefn(lyr_defn.GetFieldIndex("intfield"), fd, ogr.ALTER_ALL_FLAG)
lyr.ResetReading()
feat = lyr.GetNextFeature()
if feat.GetField("oldintfld") != '98765':
return 'fail'
feat = None
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
return 'success'
###############################################################################
# Test DeleteField()
def ogr_rfc35_mem_5():
lyr = gdaltest.rfc35_mem_ds.GetLayer(0)
lyr_defn = lyr.GetLayerDefn()
if lyr.TestCapability(ogr.OLCDeleteField) != 1:
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = lyr.DeleteField(-1)
gdal.PopErrorHandler()
if ret == 0:
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = lyr.DeleteField(lyr.GetLayerDefn().GetFieldCount())
gdal.PopErrorHandler()
if ret == 0:
return 'fail'
if lyr.DeleteField(0) != 0:
return 'fail'
ret = CheckFeatures(lyr, baz = 'baz5')
if ret != 'success':
return ret
if lyr.DeleteField(lyr_defn.GetFieldIndex('baw20')) != 0:
return 'fail'
ret = CheckFeatures(lyr, baz = 'baz5', baw = None)
if ret != 'success':
return ret
if lyr.DeleteField(lyr_defn.GetFieldIndex('baz5')) != 0:
return 'fail'
ret = CheckFeatures(lyr, baz = None, baw = None)
if ret != 'success':
return ret
if lyr.DeleteField(lyr_defn.GetFieldIndex('foo5')) != 0:
return 'fail'
if lyr.DeleteField(lyr_defn.GetFieldIndex('bar10')) != 0:
return 'fail'
ret = CheckFeatures(lyr, foo = None, bar = None, baz = None, baw = None)
if ret != 'success':
return ret
return 'success'
###############################################################################
# Cleanup
def ogr_rfc35_mem_cleanup():
gdaltest.rfc35_mem_ds = None
return 'success'
gdaltest_list = [
ogr_rfc35_mem_1,
ogr_rfc35_mem_2,
ogr_rfc35_mem_3,
ogr_rfc35_mem_4,
ogr_rfc35_mem_5,
ogr_rfc35_mem_cleanup ]
if __name__ == '__main__':
gdaltest.setup_run( 'ogr_rfc35_mem' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
avg_line_length: 27.645652 | max_line_length: 106 | alphanum_fraction: 0.603916

hexsha: 73ffa14cbec420082e5db045ae75ee6d02a164e5 | size: 16,774 | ext: py | lang: Python
path: plugins/xenserver/xenapi/etc/xapi.d/plugins/utils.py
repo: ebalduf/nova-backports @ 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | licenses: ["Apache-2.0"]
max_stars_count: 5 (2016-04-28T16:20:38.000Z to 2021-04-25T11:19:03.000Z) | max_issues_count: null | max_forks_count: 5 (2020-04-08T20:24:45.000Z to 2020-10-05T19:02:13.000Z)
content:

# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE: XenServer still only supports Python 2.4 in its dom0 userspace
# which means the Nova xenapi plugins must use only Python 2.4 features
"""Various utilities used by XenServer plugins."""
try:
import cPickle as pickle
except ImportError:
import pickle
import errno
import logging
import os
import shutil
import signal
import subprocess
import tempfile
import XenAPIPlugin
LOG = logging.getLogger(__name__)
CHUNK_SIZE = 8192
class CommandNotFound(Exception):
pass
def delete_if_exists(path):
try:
os.unlink(path)
except OSError, e: # noqa
if e.errno == errno.ENOENT:
LOG.warning("'%s' was already deleted, skipping delete" % path)
else:
raise
def _link(src, dst):
LOG.info("Hard-linking file '%s' -> '%s'" % (src, dst))
os.link(src, dst)
def _rename(src, dst):
LOG.info("Renaming file '%s' -> '%s'" % (src, dst))
try:
os.rename(src, dst)
except OSError, e: # noqa
if e.errno == errno.EXDEV:
LOG.error("Invalid cross-device link. Perhaps %s and %s should "
"be symlinked on the same filesystem?" % (src, dst))
raise
def make_subprocess(cmdline, stdout=False, stderr=False, stdin=False,
universal_newlines=False, close_fds=True, env=None):
"""Make a subprocess according to the given command-line string
"""
LOG.info("Running cmd '%s'" % " ".join(cmdline))
kwargs = {}
kwargs['stdout'] = stdout and subprocess.PIPE or None
kwargs['stderr'] = stderr and subprocess.PIPE or None
kwargs['stdin'] = stdin and subprocess.PIPE or None
kwargs['universal_newlines'] = universal_newlines
kwargs['close_fds'] = close_fds
kwargs['env'] = env
try:
proc = subprocess.Popen(cmdline, **kwargs)
except OSError, e: # noqa
if e.errno == errno.ENOENT:
raise CommandNotFound
else:
raise
return proc
class SubprocessException(Exception):
def __init__(self, cmdline, ret, out, err):
Exception.__init__(self, "'%s' returned non-zero exit code: "
"retcode=%i, out='%s', stderr='%s'"
% (cmdline, ret, out, err))
self.cmdline = cmdline
self.ret = ret
self.out = out
self.err = err
def finish_subprocess(proc, cmdline, cmd_input=None, ok_exit_codes=None):
"""Ensure that the process returned a zero exit code indicating success
"""
if ok_exit_codes is None:
ok_exit_codes = [0]
out, err = proc.communicate(cmd_input)
ret = proc.returncode
if ret not in ok_exit_codes:
LOG.error("Command '%(cmdline)s' with process id '%(pid)s' expected "
"return code in '%(ok)s' but got '%(rc)s': %(err)s" %
{'cmdline': cmdline, 'pid': proc.pid, 'ok': ok_exit_codes,
'rc': ret, 'err': err})
raise SubprocessException(' '.join(cmdline), ret, out, err)
return out
def run_command(cmd, cmd_input=None, ok_exit_codes=None):
"""Abstracts out the basics of issuing system commands. If the command
returns anything in stderr, an exception is raised with that information.
Otherwise, the output from stdout is returned.
cmd_input is passed to the process on standard input.
"""
proc = make_subprocess(cmd, stdout=True, stderr=True, stdin=True,
close_fds=True)
return finish_subprocess(proc, cmd, cmd_input=cmd_input,
ok_exit_codes=ok_exit_codes)
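# Example (illustrative): run_command returns the captured stdout on success and raises
# SubprocessException when the exit status is not in ok_exit_codes, e.g.
#
#     out = run_command(["vhd-util", "query", "-n", vdi_path, "-f"])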
def try_kill_process(proc):
"""Sends the given process the SIGKILL signal."""
pid = proc.pid
LOG.info("Killing process %s" % pid)
try:
os.kill(pid, signal.SIGKILL)
except Exception:
LOG.exception("Failed to kill %s" % pid)
def make_staging_area(sr_path):
"""The staging area is a place where we can temporarily store and
manipulate VHDs. The use of the staging area is different for upload and
download:
Download
========
When we download the tarball, the VHDs contained within will have names
like "snap.vhd" and "image.vhd". We need to assign UUIDs to them before
moving them into the SR. However, since 'image.vhd' may be a base_copy, we
need to link it to 'snap.vhd' (using vhd-util modify) before moving both
into the SR (otherwise the SR.scan will cause 'image.vhd' to be deleted).
The staging area gives us a place to perform these operations before they
are moved to the SR, scanned, and then registered with XenServer.
Upload
======
On upload, we want to rename the VHDs to reflect what they are, 'snap.vhd'
in the case of the snapshot VHD, and 'image.vhd' in the case of the
base_copy. The staging area provides a directory in which we can create
hard-links to rename the VHDs without affecting what's in the SR.
NOTE
====
The staging area is created as a subdirectory within the SR in order to
guarantee that it resides within the same filesystem and therefore permit
hard-linking and cheap file moves.
"""
staging_path = tempfile.mkdtemp(dir=sr_path)
return staging_path
def cleanup_staging_area(staging_path):
"""Remove staging area directory
On upload, the staging area contains hard-links to the VHDs in the SR;
it's safe to remove the staging-area because the SR will keep the link
count > 0 (so the VHDs in the SR will not be deleted).
"""
if os.path.exists(staging_path):
shutil.rmtree(staging_path)
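# Illustrative upload-path sketch (sr_path, vdi_uuids and dest_fileobj are placeholders):
# callers pair make_staging_area() with cleanup_staging_area(), hard-link the SR's VHDs
# in with prepare_staging_area(), and stream them out with create_tarball(), e.g.
#
#     staging_path = make_staging_area(sr_path)
#     try:
#         prepare_staging_area(sr_path, staging_path, vdi_uuids)
#         create_tarball(dest_fileobj, staging_path)
#     finally:
#         cleanup_staging_area(staging_path)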
def _handle_old_style_images(staging_path):
"""Rename files to conform to new image format, if needed.
Old-Style:
snap.vhd -> image.vhd -> base.vhd
New-Style:
0.vhd -> 1.vhd -> ... (n-1).vhd
The New-Style format has the benefit of being able to support a VDI chain
of arbitrary length.
"""
file_num = 0
for filename in ('snap.vhd', 'image.vhd', 'base.vhd'):
path = os.path.join(staging_path, filename)
if os.path.exists(path):
_rename(path, os.path.join(staging_path, "%d.vhd" % file_num))
file_num += 1
# Rename any format of name to 0.vhd when there is only single one
contents = os.listdir(staging_path)
if len(contents) == 1:
filename = contents[0]
if filename != '0.vhd' and filename.endswith('.vhd'):
_rename(
os.path.join(staging_path, filename),
os.path.join(staging_path, '0.vhd'))
def _assert_vhd_not_hidden(path):
"""Sanity check to ensure that only appropriate VHDs are marked as hidden.
If this flag is incorrectly set, then when we move the VHD into the SR, it
will be deleted out from under us.
"""
query_cmd = ["vhd-util", "query", "-n", path, "-f"]
out = run_command(query_cmd)
for line in out.splitlines():
if line.lower().startswith('hidden'):
value = line.split(':')[1].strip()
if value == "1":
raise Exception(
"VHD %s is marked as hidden without child" % path)
def _vhd_util_check(vdi_path):
check_cmd = ["vhd-util", "check", "-n", vdi_path, "-p"]
out = run_command(check_cmd, ok_exit_codes=[0, 22])
first_line = out.splitlines()[0].strip()
return out, first_line
def _validate_vhd(vdi_path):
"""This checks for several errors in the VHD structure.
Most notably, it checks that the timestamp in the footer is correct, but
may pick up other errors also.
This check ensures that the timestamps listed in the VHD footer aren't in
the future. This can occur during a migration if the clocks on the two
Dom0's are out-of-sync. This would corrupt the SR if it were imported, so
generate an exception to bail.
"""
out, first_line = _vhd_util_check(vdi_path)
if 'invalid' in first_line:
LOG.warning("VHD invalid, attempting repair.")
repair_cmd = ["vhd-util", "repair", "-n", vdi_path]
run_command(repair_cmd)
out, first_line = _vhd_util_check(vdi_path)
if 'invalid' in first_line:
if 'footer' in first_line:
part = 'footer'
elif 'header' in first_line:
part = 'header'
else:
part = 'setting'
details = first_line.split(':', 1)
if len(details) == 2:
details = details[1]
else:
details = first_line
extra = ''
if 'timestamp' in first_line:
extra = (" ensure source and destination host machines have "
"time set correctly")
LOG.info("VDI Error details: %s" % out)
raise Exception(
"VDI '%(vdi_path)s' has an invalid %(part)s: '%(details)s'"
"%(extra)s" % {'vdi_path': vdi_path, 'part': part,
'details': details, 'extra': extra})
LOG.info("VDI is valid: %s" % vdi_path)
def _validate_vdi_chain(vdi_path):
"""This check ensures that the parent pointers on the VHDs are valid
before we move the VDI chain to the SR. This is *very* important
because a bad parent pointer will corrupt the SR causing a cascade of
failures.
"""
def get_parent_path(path):
query_cmd = ["vhd-util", "query", "-n", path, "-p"]
out = run_command(query_cmd, ok_exit_codes=[0, 22])
first_line = out.splitlines()[0].strip()
if first_line.endswith(".vhd"):
return first_line
elif 'has no parent' in first_line:
return None
elif 'query failed' in first_line:
raise Exception("VDI '%s' not present which breaks"
" the VDI chain, bailing out" % path)
else:
raise Exception("Unexpected output '%s' from vhd-util" % out)
cur_path = vdi_path
while cur_path:
_validate_vhd(cur_path)
cur_path = get_parent_path(cur_path)
def _validate_sequenced_vhds(staging_path):
"""This check ensures that the VHDs in the staging area are sequenced
properly from 0 to n-1 with no gaps.
"""
seq_num = 0
filenames = os.listdir(staging_path)
for filename in filenames:
if not filename.endswith('.vhd'):
continue
# Ignore legacy swap embedded in the image, generated on-the-fly now
if filename == "swap.vhd":
continue
vhd_path = os.path.join(staging_path, "%d.vhd" % seq_num)
if not os.path.exists(vhd_path):
raise Exception("Corrupt image. Expected seq number: %d. Files: %s"
% (seq_num, filenames))
seq_num += 1
def import_vhds(sr_path, staging_path, uuid_stack):
"""Move VHDs from staging area into the SR.
The staging area is necessary because we need to perform some fixups
(assigning UUIDs, relinking the VHD chain) before moving into the SR,
otherwise the SR manager process could potentially delete the VHDs out from
under us.
Returns: A dict of imported VHDs:
{'root': {'uuid': 'ffff-aaaa'}}
"""
_handle_old_style_images(staging_path)
_validate_sequenced_vhds(staging_path)
files_to_move = []
# Collect sequenced VHDs and assign UUIDs to them
seq_num = 0
while True:
orig_vhd_path = os.path.join(staging_path, "%d.vhd" % seq_num)
if not os.path.exists(orig_vhd_path):
break
# Rename (0, 1 .. N).vhd -> aaaa-bbbb-cccc-dddd.vhd
vhd_uuid = uuid_stack.pop()
vhd_path = os.path.join(staging_path, "%s.vhd" % vhd_uuid)
_rename(orig_vhd_path, vhd_path)
if seq_num == 0:
leaf_vhd_path = vhd_path
leaf_vhd_uuid = vhd_uuid
files_to_move.append(vhd_path)
seq_num += 1
# Re-link VHDs, in reverse order, from base-copy -> leaf
parent_path = None
for vhd_path in reversed(files_to_move):
if parent_path:
# Link to parent
modify_cmd = ["vhd-util", "modify", "-n", vhd_path,
"-p", parent_path]
run_command(modify_cmd)
parent_path = vhd_path
# Sanity check the leaf VHD
_assert_vhd_not_hidden(leaf_vhd_path)
_validate_vdi_chain(leaf_vhd_path)
# Move files into SR
for orig_path in files_to_move:
new_path = os.path.join(sr_path, os.path.basename(orig_path))
_rename(orig_path, new_path)
imported_vhds = dict(root=dict(uuid=leaf_vhd_uuid))
return imported_vhds
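# Illustrative download-path sketch (image_stream and uuid_stack are placeholders): a
# tarball received from the image service is unpacked into a staging area and then
# imported, e.g.
#
#     staging_path = make_staging_area(sr_path)
#     try:
#         extract_tarball(image_stream, staging_path)
#         vdis = import_vhds(sr_path, staging_path, uuid_stack)
#         # vdis == {'root': {'uuid': '<leaf VHD uuid>'}}
#     finally:
#         cleanup_staging_area(staging_path)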
def prepare_staging_area(sr_path, staging_path, vdi_uuids, seq_num=0):
"""Hard-link VHDs into staging area."""
for vdi_uuid in vdi_uuids:
source = os.path.join(sr_path, "%s.vhd" % vdi_uuid)
link_name = os.path.join(staging_path, "%d.vhd" % seq_num)
_link(source, link_name)
seq_num += 1
def create_tarball(fileobj, path, callback=None, compression_level=None):
"""Create a tarball from a given path.
:param fileobj: a file-like object holding the tarball byte-stream.
If None, then only the callback will be used.
:param path: path to create tarball from
:param callback: optional callback to call on each chunk written
:param compression_level: compression level, e.g., 9 for gzip -9.
"""
tar_cmd = ["tar", "-zc", "--directory=%s" % path, "."]
env = os.environ.copy()
if compression_level and 1 <= compression_level <= 9:
env["GZIP"] = "-%d" % compression_level
tar_proc = make_subprocess(tar_cmd, stdout=True, stderr=True, env=env)
try:
while True:
chunk = tar_proc.stdout.read(CHUNK_SIZE)
if chunk == '':
break
if callback:
callback(chunk)
if fileobj:
fileobj.write(chunk)
except Exception:
try_kill_process(tar_proc)
raise
finish_subprocess(tar_proc, tar_cmd)
def extract_tarball(fileobj, path, callback=None):
"""Extract a tarball to a given path.
:param fileobj: a file-like object holding the tarball byte-stream
:param path: path to extract tarball into
:param callback: optional callback to call on each chunk read
"""
tar_cmd = ["tar", "-zx", "--directory=%s" % path]
tar_proc = make_subprocess(tar_cmd, stderr=True, stdin=True)
try:
while True:
chunk = fileobj.read(CHUNK_SIZE)
if chunk == '':
break
if callback:
callback(chunk)
tar_proc.stdin.write(chunk)
            # NOTE(tpownall): If we do not poll for the tar process exit
            # code when tar has exited prematurely, there is a chance that
            # tar becomes a defunct zombie child of the glance plugin and is
            # reparented under init, waiting forever on the stdin pipe to
            # close. Polling for the exit code allows us to break the pipe.
returncode = tar_proc.poll()
tar_pid = tar_proc.pid
if returncode is not None:
LOG.error("tar extract with process id '%(pid)s' "
"exited early with '%(rc)s'" %
{'pid': tar_pid, 'rc': returncode})
raise SubprocessException(
' '.join(tar_cmd), returncode, "", "")
except SubprocessException:
# no need to kill already dead process
raise
except Exception:
LOG.exception("Failed while sending data to tar pid: %s" % tar_pid)
try_kill_process(tar_proc)
raise
finish_subprocess(tar_proc, tar_cmd)
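# Example (illustrative; image_stream and staging_path are placeholders): the callback
# hooks on create_tarball()/extract_tarball() let callers hash or meter the byte-stream
# as it passes through, e.g.
#
#     md5 = hashlib.md5()
#     extract_tarball(image_stream, staging_path, callback=md5.update)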
def _handle_serialization(func):
def wrapped(session, params):
params = pickle.loads(params['params'])
rv = func(session, *params['args'], **params['kwargs'])
return pickle.dumps(rv)
return wrapped
def register_plugin_calls(*funcs):
"""Wrapper around XenAPIPlugin.dispatch which handles pickle
serialization.
"""
wrapped_dict = {}
for func in funcs:
wrapped_dict[func.__name__] = _handle_serialization(func)
XenAPIPlugin.dispatch(wrapped_dict)
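# Illustrative registration sketch (do_something is a hypothetical plugin function, not
# part of this module): a dom0 plugin built on these helpers typically ends with
#
#     def do_something(session, vdi_uuid, label=None):
#         return {'vdi_uuid': vdi_uuid, 'label': label}
#
#     if __name__ == '__main__':
#         register_plugin_calls(do_something)
#
# The XenAPI caller passes a single 'params' argument containing a pickled
# {'args': [...], 'kwargs': {...}} dict; _handle_serialization unpickles it, calls the
# function, and pickles the return value for the trip back.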
avg_line_length: 33.215842 | max_line_length: 79 | alphanum_fraction: 0.627519

hexsha: 73ffa25789109ecc7cf072580326ec2d8581868f | size: 1,048 | ext: py | lang: Python
path: datasets/MOT/seed/Impl/MOT20.py
repo: zhangzhengde0225/SwinTrack @ 526be17f8ef266cb924c6939bd8dda23e9b73249 | licenses: ["MIT"]
max_stars_count: 143 (2021-12-03T02:33:36.000Z to 2022-03-29T00:01:48.000Z) | max_issues_count: 33 (2021-12-03T10:32:05.000Z to 2022-03-31T02:13:55.000Z) | max_forks_count: 24 (2021-12-04T06:46:42.000Z to 2022-03-30T07:57:47.000Z)
content:

from datasets.MOT.constructor.base_interface import MultipleObjectTrackingDatasetConstructor
def get_mot_class_definition():
return {
1: 'Pedestrian',
2: 'Person on vehicle',
3: 'Car',
4: 'Bicycle',
5: 'Motorbike',
6: 'Non motorized vehicle',
7: 'Static person',
8: 'Distractor',
9: 'Occluder',
10: 'Occluder on the ground',
11: 'Occluder full',
12: 'Reflection',
13: '(Unknown)'
}
def get_mot20_sequences_from_path(sequences):
valid_sequences = {}
for sequence in sequences:
words = sequence.split('-')
assert len(words) == 2
assert words[0] == 'MOT20'
if words[1] not in valid_sequences:
valid_sequences[words[1]] = sequence
return valid_sequences.values()
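# Example (illustrative): ['MOT20-01', 'MOT20-02', 'MOT20-02'] collapses to one
# directory name per sequence id, i.e. ['MOT20-01', 'MOT20-02'].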
def construct_MOT20(constructor: MultipleObjectTrackingDatasetConstructor, seed):
from .MOT17 import construct_MOT
construct_MOT(constructor, seed, get_mot20_sequences_from_path, get_mot_class_definition())
avg_line_length: 29.111111 | max_line_length: 95 | alphanum_fraction: 0.640267

hexsha: 73ffd71ca69ab0a7feb206328104e32fea5327f8 | size: 254 | ext: py | lang: Python
path: __init__.py
repo: komoto48g/pyJeol @ b4ef01656107d176b199d580cc4a1e0fa1652df5 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:

#! python
# -*- coding: utf-8 -*-
"""pyJeol package
Last updated: <2021-12-28 16:43:38 +0900>
Version: None
Author: Kazuya O'moto <komoto@jeol.co.jp>
"""
from .temisc import Environ
from .temisc import mrange
from .temisc import FLHex, OLHex
avg_line_length: 21.166667 | max_line_length: 47 | alphanum_fraction: 0.673228