function — string (lengths 16 to 7.61k)
repo_name — string (lengths 9 to 46)
features — sequence
def build_create_by_id_request( policy_assignment_id: str, *, json: JSONType = None, content: Any = None, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def build_get_by_id_request( policy_assignment_id: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__(self, client, config, serializer, deserializer):
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def delete( self, scope: str, policy_assignment_name: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def create( self, scope: str, policy_assignment_name: str, parameters: "_models.PolicyAssignment", **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get( self, scope: str, policy_assignment_name: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list_for_resource_group( self, resource_group_name: str, filter: Optional[str] = None, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None):
    if not next_link:
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def extract_data(pipeline_response):
    deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)
    return deserialized.next_link or None, iter(list_of_elem)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list_for_resource( self, resource_group_name: str, resource_provider_namespace: str, parent_resource_path: str, resource_type: str, resource_name: str, filter: Optional[str] = None, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None):
    if not next_link:
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def extract_data(pipeline_response):
    deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)
    return deserialized.next_link or None, iter(list_of_elem)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list( self, filter: Optional[str] = None, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None):
    if not next_link:
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def extract_data(pipeline_response):
    deserialized = self._deserialize("PolicyAssignmentListResult", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)
    return deserialized.next_link or None, iter(list_of_elem)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def delete_by_id( self, policy_assignment_id: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def create_by_id( self, policy_assignment_id: str, parameters: "_models.PolicyAssignment", **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_by_id( self, policy_assignment_id: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
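For context, the PolicyAssignmentsOperations methods above are normally reached through the generated management client rather than called directly; a minimal usage sketch, assuming azure-mgmt-resource and azure-identity are installed (the subscription ID and scope below are placeholders, not from these rows):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.resource.policy import PolicyClient

    # Hedged sketch: drive the operations group via the generated client.
    client = PolicyClient(DefaultAzureCredential(), "<subscription-id>")
    for assignment in client.policy_assignments.list_for_resource_group("my-rg"):
        print(assignment.name)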
def __init__(self, filename="Default.log"):
    self.terminal = sys.stdout
    self.log = open(filename, "a")
loretoparisi/docker
[ 92, 66, 92, 4, 1480539778 ]
def flush(self): pass
loretoparisi/docker
[ 92, 66, 92, 4, 1480539778 ]
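The two methods above read like the classic stdout "tee" logger; a hedged sketch of the write() method such a class presumably pairs with (the Logger name and the installation line are assumptions, not part of these rows):

    import sys

    class Logger(object):  # hypothetical name for the class these methods belong to
        def __init__(self, filename="Default.log"):
            self.terminal = sys.stdout
            self.log = open(filename, "a")

        def write(self, message):
            # Mirror every write to both the real stdout and the log file.
            self.terminal.write(message)
            self.log.write(message)

        def flush(self):
            pass

    sys.stdout = Logger()  # typical installation: everything printed is also logged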
def format_fans(fans): return format_line(prefix='fans'.rjust(RJUST), values=fans)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_pwms(pwms): return format_line(prefix='pwms'.rjust(RJUST), values=pwms)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_names(names): return format_line(prefix='names'.rjust(RJUST), values=names)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_temps(temps): return format_line(prefix='temps'.rjust(RJUST), values=temps)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_limits(limits): return format_line(prefix='limits'.rjust(RJUST), values=limits)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_headrooms(headrooms): return format_line(prefix='headrooms'.rjust(RJUST), values=headrooms)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
def format_differences(differences): return format_line(prefix='differences'.rjust(RJUST), values=differences)
Bengt/AL-FanControl
[ 16, 4, 16, 5, 1443956681 ]
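All seven helpers above delegate to a format_line(prefix, values) that these rows do not include; a minimal sketch of what it presumably does (RJUST and the per-value width are assumed constants):

    RJUST = 11  # assumed prefix width; the real constant is not in these rows

    def format_line(prefix, values):
        # Right-align each value and join them into one status-column row.
        return '%s %s' % (prefix, ' '.join(str(value).rjust(4) for value in values))

    print(format_line(prefix='fans'.rjust(RJUST), values=[1200, 1150]))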
def forwards(self, orm):
    # Adding model 'Package'
    db.create_table(u'api_package', (
        (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
        ('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
        ('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
    ))
    db.send_create_signal(u'api', ['Package'])

    # Adding unique constraint on 'Package', fields ['name', 'url']
    db.create_unique(u'api_package', ['name', 'url'])
toranb/django-bower-registry
[ 16, 3, 16, 1, 1375493973 ]
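A South migration with this forwards() would normally carry the inverse backwards(); a sketch of what it would look like, following South's conventions (reconstructed, not taken from the repo):

    def backwards(self, orm):
        # Removing unique constraint on 'Package', fields ['name', 'url']
        db.delete_unique(u'api_package', ['name', 'url'])

        # Deleting model 'Package'
        db.delete_table(u'api_package')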
def __init__(self, host='localhost', port=8125, enabled=True, prefix=''):
    self.addr = None
    self.enabled = enabled
    if enabled:
        self.set_address(host, port)
    self.prefix = prefix
    self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
smarkets/smk_python_sdk
[ 18, 11, 18, 3, 1313778515 ]
def timed(self, stat, sample_rate=1):
    log.debug('Entering timed context for %r' % (stat,))
    start = time.time()
    yield
    duration = int((time.time() - start) * 1000)
    log.debug('Exiting timed context for %r' % (stat,))
    self.timing(stat, duration, sample_rate)
smarkets/smk_python_sdk
[ 18, 11, 18, 3, 1313778515 ]
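timed() is written as a generator, so in the original class it is presumably exposed via contextlib.contextmanager; a minimal standalone sketch of the same pattern (the stat name and the print are stand-ins for self.timing()):

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed(stat):
        # Measure wall-clock time around the with-block body, in milliseconds.
        start = time.time()
        yield
        duration = int((time.time() - start) * 1000)
        print('%s: %dms' % (stat, duration))  # the SDK reports via self.timing()

    with timed('db.query'):  # hypothetical stat name
        time.sleep(0.05)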
def increment(self, stats, sample_rate=1):
    """Increments one or more stats counters"""
    self.update_stats(stats, 1, sample_rate)
smarkets/smk_python_sdk
[ 18, 11, 18, 3, 1313778515 ]
def update_stats(self, stats, delta=1, sampleRate=1):
    """Updates one or more stats counters by arbitrary amounts"""
    if not self.enabled or self.addr is None:
        return
    if type(stats) is not list:
        stats = [stats]
    data = {}
    for stat in stats:
        data["%s%s" % (self.prefix, stat)] = "%s|c" % delta
    self.send(data, sampleRate)
smarkets/smk_python_sdk
[ 18, 11, 18, 3, 1313778515 ]
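update_stats() builds a dict of metric name to "delta|c" and hands it to self.send(); the statsd wire format that send() presumably emits over the UDP socket from __init__ looks like this (a sketch, not the SDK's actual send()):

    import random

    def send(self, data, sample_rate=1):
        # Sketch of a typical statsd send(): one 'name:delta|c' datagram per
        # metric, with an optional '|@rate' suffix when sampling is in effect.
        if sample_rate < 1 and random.random() > sample_rate:
            return  # drop this sample entirely
        for stat, value in data.items():
            payload = "%s:%s" % (stat, value)
            if sample_rate < 1:
                payload += "|@%s" % sample_rate
            self.udp_sock.sendto(payload.encode("ascii"), self.addr)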
def __init__(self, raw_data): self._raw = raw_data
hfaran/slack-export-viewer
[ 731, 161, 731, 37, 1456480543 ]
def display_name(self):
    """
    Find the most appropriate display name for a user: look for a
    "display_name", then a "real_name", and finally fall back to the
    always-present "name".
    """
    for k in self._NAME_KEYS:
        if self._raw.get(k):
            return self._raw[k]
        if "profile" in self._raw and self._raw["profile"].get(k):
            return self._raw["profile"][k]
    return self._raw["name"]
hfaran/slack-export-viewer
[ 731, 161, 731, 37, 1456480543 ]
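display_name() iterates self._NAME_KEYS, which these rows never define; given the docstring, it is presumably something like:

    # Assumed from the docstring; the actual constant is not in these rows.
    _NAME_KEYS = ("display_name", "real_name")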
def email(self):
    """Shortcut property for finding the e-mail address or bot URL."""
    if "profile" in self._raw:
        email = self._raw["profile"].get("email")
    elif "bot_url" in self._raw:
        email = self._raw["bot_url"]
    else:
        email = None
    if not email:
        logging.debug("No email found for %s", self._raw.get("name"))
    return email
hfaran/slack-export-viewer
[ 731, 161, 731, 37, 1456480543 ]
def setUp(self):
    super(BaseSystemTest, self).setUp()
    # Clear out any pre-existing tables
    for tablename in self.dynamo.list_tables():
        self.dynamo.delete_table(tablename)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def tearDown(self):
    super(TestMisc, self).tearDown()
    self.dynamo.default_return_capacity = False
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_connection_region(self):
    """Connection can access name of connected region"""
    self.assertTrue(isinstance(self.dynamo.region, str))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_connect_to_region_creds(self):
    """Can connect to a dynamo region with credentials"""
    conn = DynamoDBConnection.connect(
        "us-west-1", access_key="abc", secret_key="12345"
    )
    self.assertIsNotNone(conn.host)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_retry_on_throughput_error(self, time):
    """Throughput exceptions trigger a retry of the request"""

    def call(*_, **__):
        """Dummy service call"""
        response = {
            "ResponseMetadata": {
                "HTTPStatusCode": 400,
            },
            "Error": {
                "Code": "ProvisionedThroughputExceededException",
                "Message": "Does not matter",
            },
        }
        raise ClientError(response, "list_tables")

    with patch.object(self.dynamo, "client") as client:
        client.list_tables.side_effect = call
        with self.assertRaises(ThroughputException):
            self.dynamo.call("list_tables")
    self.assertEqual(len(time.sleep.mock_calls), self.dynamo.request_retries - 1)
    self.assertTrue(time.sleep.called)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_magic_table_props(self):
    """Table can look up properties on response object"""
    hash_key = DynamoKey("id")
    self.dynamo.create_table("foobar", hash_key=hash_key)
    ret = self.dynamo.describe_table("foobar")
    assert ret is not None
    self.assertEqual(ret.item_count, ret["ItemCount"])
    with self.assertRaises(KeyError):
        self.assertIsNotNone(ret["Missing"])
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_describe_during_delete(self):
    """Describing a table during a delete operation should not crash"""
    response = {
        "ItemCount": 0,
        "ProvisionedThroughput": {
            "NumberOfDecreasesToday": 0,
            "ReadCapacityUnits": 5,
            "WriteCapacityUnits": 5,
        },
        "TableName": "myTableName",
        "TableSizeBytes": 0,
        "TableStatus": "DELETING",
    }
    table = Table.from_response(response)
    self.assertEqual(table.status, "DELETING")
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_re_raise_passthrough(self):
    """DynamoDBError can re-raise itself if missing original exception"""
    err = DynamoDBError(400, Code="ErrCode", Message="Ouch", args={})
    caught = False
    try:
        err.re_raise()
    except DynamoDBError as e:
        caught = True
        self.assertEqual(err, e)
    self.assertTrue(caught)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_default_return_capacity(self):
    """When default_return_capacity=True, always return capacity"""
    self.dynamo.default_return_capacity = True
    with patch.object(self.dynamo, "call") as call:
        call().get.return_value = None
        rs = self.dynamo.scan("foobar")
        list(rs)
    call.assert_called_with(
        "scan",
        TableName="foobar",
        ReturnConsumedCapacity="INDEXES",
        ConsistentRead=False,
    )
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_limit_complete(self):
    """A limit with item_capacity = 0 is 'complete'"""
    limit = Limit(item_limit=0)
    self.assertTrue(limit.complete)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_wait_delete_table(self):
    """Delete table shall wait for the table to go offline."""
    tablename = "foobar_wait"
    hash_key = DynamoKey("id")
    self.dynamo.create_table(tablename, hash_key=hash_key, wait=True)
    result = self.dynamo.delete_table(tablename, wait=True)
    self.assertTrue(result)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def make_table(self):
    """Convenience method for making a table"""
    hash_key = DynamoKey("id")
    self.dynamo.create_table("foobar", hash_key=hash_key)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_int(self):
    """Store and retrieve an int"""
    self.make_table()
    self.dynamo.put_item("foobar", {"id": "a", "num": 1})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(item["num"], 1)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_decimal(self):
    """Store and retrieve a Decimal"""
    self.make_table()
    self.dynamo.put_item("foobar", {"id": "a", "num": Decimal("1.1")})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(item["num"], Decimal("1.1"))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_binary_bytes(self):
    """Store and retrieve bytes as a binary"""
    self.make_table()
    data = {"a": 1, "b": 2}
    self.dynamo.put_item("foobar", {"id": "a", "data": Binary(dumps(data))})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(loads(item["data"].value), data)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_number_set(self):
    """Store and retrieve a number set"""
    self.make_table()
    item = {
        "id": "a",
        "datas": set([1, 2, 3]),
    }
    self.dynamo.put_item("foobar", item)
    ret = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(ret, item)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_binary_equal(self):
    """Binary should eq other Binaries and also raw bytestrings"""
    self.assertEqual(Binary("a"), Binary("a"))
    self.assertEqual(Binary("a"), b"a")
    self.assertFalse(Binary("a") != Binary("a"))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_binary_converts_unicode(self):
    """Binary will convert unicode to bytes"""
    b = Binary("a")
    self.assertTrue(isinstance(b.value, bytes))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_bool(self):
    """Store and retrieve a boolean"""
    self.make_table()
    self.dynamo.put_item("foobar", {"id": "abc", "b": True})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(item["b"], True)
    self.assertTrue(isinstance(item["b"], bool))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_dict(self):
    """Store and retrieve a dict"""
    self.make_table()
    data = {
        "i": 1,
        "s": "abc",
        "n": None,
        "l": ["a", 1, True],
        "b": False,
    }
    self.dynamo.put_item("foobar", {"id": "abc", "d": data})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(item["d"], data)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_nested_list(self):
    """Store and retrieve a nested list"""
    self.make_table()
    data = [
        1,
        [
            True,
            None,
            "abc",
        ],
    ]
    self.dynamo.put_item("foobar", {"id": "abc", "l": data})
    item = list(self.dynamo.scan("foobar"))[0]
    self.assertEqual(item["l"], data)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_register_encoder(self):
    """Can register a custom encoder"""
    from datetime import datetime

    dynamizer = Dynamizer()
    dynamizer.register_encoder(datetime, lambda d, v: (STRING, v.isoformat()))
    now = datetime.utcnow()
    self.assertEqual(dynamizer.raw_encode(now), (STRING, now.isoformat()))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_add_dicts_base_case(self):
    """add_dict where one argument is None returns the other"""
    f = object()
    self.assertEqual(add_dicts(f, None), f)
    self.assertEqual(add_dicts(None, f), f)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_count_repr(self):
    """Count repr"""
    count = Count(0, 0)
    self.assertEqual(repr(count), "Count(0)")
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_count_subtraction(self):
    """Count subtraction"""
    count = Count(4, 2)
    self.assertEqual(count - 2, 2)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_count_division(self):
    """Count division"""
    count = Count(4, 2)
    self.assertEqual(count / 2, 2)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_count_add_capacity(self):
    """Count addition with consumed_capacity"""
    count = Count(4, 2, Capacity(3, 0))
    count2 = Count(5, 3, Capacity(2, 0))
    ret = count + count2
    self.assertEqual(ret, 9)
    self.assertEqual(ret.scanned_count, 5)
    self.assertEqual(ret.consumed_capacity.read, 5)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_capacity_format(self):
    """String formatting for Capacity"""
    c = Capacity(1, 3)
    self.assertEqual(str(c), "R:1.0 W:3.0")
    c = Capacity(0, 0)
    self.assertEqual(str(c), "0")
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_consumed_capacity_equality(self):
    """ConsumedCapacity addition and equality"""
    cap = ConsumedCapacity(
        "foobar",
        Capacity(0, 10),
        Capacity(0, 2),
        {
            "l-index": Capacity(0, 4),
        },
        {
            "g-index": Capacity(0, 3),
        },
    )
    c2 = ConsumedCapacity(
        "foobar",
        Capacity(0, 10),
        Capacity(0, 2),
        {
            "l-index": Capacity(0, 4),
            "l-index2": Capacity(0, 7),
        },
    )
    self.assertNotEqual(cap, c2)
    c3 = ConsumedCapacity(
        "foobar",
        Capacity(0, 10),
        Capacity(0, 2),
        {
            "l-index": Capacity(0, 4),
        },
        {
            "g-index": Capacity(0, 3),
        },
    )
    self.assertIn(cap, set([c3]))
    combined = cap + c2
    self.assertEqual(
        cap + c2,
        ConsumedCapacity(
            "foobar",
            Capacity(0, 20),
            Capacity(0, 4),
            {
                "l-index": Capacity(0, 8),
                "l-index2": Capacity(0, 7),
            },
            {
                "g-index": Capacity(0, 3),
            },
        ),
    )
    self.assertIn(str(Capacity(0, 3)), str(combined))
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_always_continue_query(self):
    """Regression test.

    If result has no items but does have LastEvaluatedKey, keep querying.
    """
    conn = MagicMock()
    conn.dynamizer.decode_keys.side_effect = lambda x: x
    items = ["a", "b"]
    results = [
        {"Items": [], "LastEvaluatedKey": {"foo": 1, "bar": 2}},
        {"Items": [], "LastEvaluatedKey": {"foo": 1, "bar": 2}},
        {"Items": items},
    ]
    conn.call.side_effect = lambda *_, **__: results.pop(0)
    rs = ResultSet(conn, Limit())
    results = list(rs)
    self.assertEqual(results, items)
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
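The regression this test pins down is the pagination loop inside ResultSet: an empty Items page must not end iteration while LastEvaluatedKey is still present. A minimal sketch of that loop's assumed shape (not dynamo3's actual implementation):

    def iterate_pages(conn, **kwargs):
        # Keep calling until the service stops returning LastEvaluatedKey,
        # even when an individual page carries zero items.
        while True:
            page = conn.call("scan", **kwargs)
            for item in page.get("Items", []):
                yield item
            last_key = page.get("LastEvaluatedKey")
            if last_key is None:
                break
            kwargs["ExclusiveStartKey"] = last_key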
def tearDown(self):
    super(TestHooks, self).tearDown()
    for hooks in self.dynamo._hooks.values():
        while hooks:
            hooks.pop()
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def throw(**_):
    """Throw an exception to terminate the request"""
    raise Exception()
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def test_postcall(self):
    """postcall hooks are called after API call"""
    hash_key = DynamoKey("id")
    self.dynamo.create_table("foobar", hash_key=hash_key)
    calls = []

    def hook(*args):
        """Log the call into a list"""
        calls.append(args)

    self.dynamo.subscribe("postcall", hook)
    self.dynamo.describe_table("foobar")
    self.assertEqual(len(calls), 1)
    args = calls[0]
    self.assertEqual(len(args), 4)
    conn, command, kwargs, response = args
    self.assertEqual(conn, self.dynamo)
    self.assertEqual(command, "describe_table")
    self.assertEqual(kwargs["TableName"], "foobar")
    self.assertEqual(response["Table"]["TableName"], "foobar")
stevearc/dynamo3
[ 13, 10, 13, 1, 1393242602 ]
def _setup(self, storage_account_name, key):
    # test chunking functionality by reducing the threshold for chunking
    # and the size of each chunk, otherwise the tests would take too long
    # to execute
    self.bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=key,
        max_single_put_size=32 * 1024,
        max_block_size=2 * 1024 * 1024,
        min_large_block_upload_threshold=1 * 1024 * 1024)
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')
    if self.is_live:
        try:
            self.bsc.create_container(self.container_name)
        except:
            pass
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def assertBlobEqual(self, container_name, blob_name, expected_data):
    blob = self.bsc.get_blob_client(container_name, blob_name)
    actual_data = blob.download_blob()
    self.assertEqual(b"".join(list(actual_data.chunks())), expected_data)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_put_block_bytes_large(self, storage_account_name, storage_account_key):
    self._setup(storage_account_name, storage_account_key)
    blob = self._create_blob()

    # Act
    for i in range(5):
        resp = blob.stage_block(
            'block {0}'.format(i).encode('utf-8'),
            urandom(LARGE_BLOCK_SIZE))
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_put_block_bytes_large_with_md5(self, storage_account_name, storage_account_key):
    self._setup(storage_account_name, storage_account_key)
    blob = self._create_blob()

    # Act
    for i in range(5):
        resp = blob.stage_block(
            'block {0}'.format(i).encode('utf-8'),
            urandom(LARGE_BLOCK_SIZE),
            validate_content=True)
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_put_block_stream_large(self, storage_account_name, storage_account_key):
    self._setup(storage_account_name, storage_account_key)
    blob = self._create_blob()

    # Act
    for i in range(5):
        stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
        resp = blob.stage_block(
            'block {0}'.format(i).encode('utf-8'),
            stream,
            length=LARGE_BLOCK_SIZE)
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_put_block_stream_large_with_md5(self, storage_account_name, storage_account_key):
    self._setup(storage_account_name, storage_account_key)
    blob = self._create_blob()

    # Act
    for i in range(5):
        stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
        resp = blob.stage_block(
            'block {0}'.format(i).encode('utf-8'),
            stream,
            length=LARGE_BLOCK_SIZE,
            validate_content=True)
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_path(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'large_blob_from_path.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, max_concurrency=2, overwrite=True)
    block_list = blob.get_block_list()

    # Assert
    self.assertIsNot(len(block_list), 0)
    self.assertBlobEqual(self.container_name, blob_name, data)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_path_with_md5(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = "blob_from_path_with_md5.temp.dat"
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, validate_content=True, max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_path_non_parallel(self, storage_account_name, storage_account_key):
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(self.get_random_bytes(100))
    FILE_PATH = "blob_from_path_non_parallel.temp.dat"
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, max_concurrency=1)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_path_with_progress(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = "blob_from_path_with_progress.temp.dat"
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    progress = []

    def callback(response):
        current = response.context['upload_stream_current']
        total = response.context['data_stream_total']
        if current is not None:
            progress.append((current, total))

    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    self.assert_upload_progress(len(data), self.config.max_block_size, progress)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_path_with_properties(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'blob_from_path_with_properties.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    content_settings = ContentSettings(
        content_type='image/png',
        content_language='spanish')
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    properties = blob.get_blob_properties()
    self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
    self.assertEqual(properties.content_settings.content_language, content_settings.content_language)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_stream_chunked_upload(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'blob_from_stream_chunked_upload.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_creat_lrgblob_frm_stream_w_progress_chnkd_upload(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'stream_w_progress_chnkd_upload.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    progress = []

    def callback(response):
        current = response.context['upload_stream_current']
        total = response.context['data_stream_total']
        if current is not None:
            progress.append((current, total))

    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    self.assert_upload_progress(len(data), self.config.max_block_size, progress)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_create_large_blob_from_stream_chunked_upload_with_count(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'chunked_upload_with_count.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    blob_size = len(data) - 301
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, length=blob_size, max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_creat_lrgblob_frm_strm_chnkd_uplod_w_count_n_props(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'plod_w_count_n_props.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    content_settings = ContentSettings(
        content_type='image/png',
        content_language='spanish')
    blob_size = len(data) - 301
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(
            stream,
            length=blob_size,
            content_settings=content_settings,
            max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])
    properties = blob.get_blob_properties()
    self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
    self.assertEqual(properties.content_settings.content_language, content_settings.content_language)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def test_creat_lrg_blob_frm_stream_chnked_upload_w_props(self, storage_account_name, storage_account_key):
    # parallel tests introduce random order of requests, can only run live
    self._setup(storage_account_name, storage_account_key)
    blob_name = self._get_blob_reference()
    blob = self.bsc.get_blob_client(self.container_name, blob_name)
    data = bytearray(urandom(LARGE_BLOB_SIZE))
    FILE_PATH = 'creat_lrg_blob.temp.{}.dat'.format(str(uuid.uuid4()))
    with open(FILE_PATH, 'wb') as stream:
        stream.write(data)

    # Act
    content_settings = ContentSettings(
        content_type='image/png',
        content_language='spanish')
    with open(FILE_PATH, 'rb') as stream:
        blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2)

    # Assert
    self.assertBlobEqual(self.container_name, blob_name, data)
    properties = blob.get_blob_properties()
    self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
    self.assertEqual(properties.content_settings.content_language, content_settings.content_language)
    self._teardown(FILE_PATH)
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def add(request):
    print("jojo")
spezifanta/Paste-It
[ 2, 1, 2, 1, 1325791524 ]
def setUp(self): pass
ssut/py-hanspell
[ 245, 81, 245, 18, 1433951949 ]
def test_words(self):
    result = spell_checker.check(u'ν•œμ•„μ΄κ°€ μž₯λ‚œκΉœμ„ κ°–κ³ λ†€κ³ μžˆλ‹€. κ·Έλ§Œν•˜κ²Œ ν• κ°€?')
    assert result.errors == 4
ssut/py-hanspell
[ 245, 81, 245, 18, 1433951949 ]
def test_list(self):
    results = spell_checker.check([u'μ•ˆλ…• ν•˜μ„Έμš”.', u'μ €λŠ” ν•œκ΅­μΈ μž…λ‹ˆλ‹€.'])
    assert results[0].checked == u'μ•ˆλ…•ν•˜μ„Έμš”.'
    assert results[1].checked == u'μ €λŠ” ν•œκ΅­μΈμž…λ‹ˆλ‹€.'
ssut/py-hanspell
[ 245, 81, 245, 18, 1433951949 ]
def addOperators(self, num, target):
    """
    Adapted from https://leetcode.com/discuss/58614/java-standard-backtrace-ac-solutoin-short-and-clear

    Algorithm:
    1. DFS
    2. Special handling for multiplication
    3. Detect invalid number with leading 0's

    :type num: str
    :type target: int
    :rtype: List[str]
    """
    ret = []
    self.dfs(num, target, 0, "", 0, 0, ret)
    return ret
algorhythms/LeetCode
[ 823, 267, 823, 3, 1403872362 ]
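addOperators() delegates to self.dfs(num, target, pos, expr, value, prev, ret), which this row omits; a sketch of the backtracking helper implied by the docstring, with leading-zero pruning and the value - prev + prev * cur trick for multiplication (reconstructed, not the repo's exact code):

    def dfs(self, num, target, pos, expr, value, prev, ret):
        if pos == len(num):
            if value == target:
                ret.append(expr)
            return
        for i in range(pos, len(num)):
            if i > pos and num[pos] == '0':
                break  # reject operands with leading zeros
            cur_str = num[pos:i + 1]
            cur = int(cur_str)
            if pos == 0:
                # First operand: no operator in front of it.
                self.dfs(num, target, i + 1, cur_str, cur, cur, ret)
            else:
                self.dfs(num, target, i + 1, expr + '+' + cur_str, value + cur, cur, ret)
                self.dfs(num, target, i + 1, expr + '-' + cur_str, value - cur, -cur, ret)
                # Multiplication: undo the previous term, then re-apply it multiplied.
                self.dfs(num, target, i + 1, expr + '*' + cur_str,
                         value - prev + prev * cur, prev * cur, ret)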
def session():
    Session = sessionmaker()
    engine = create_engine(MYSQL_CONNECTION_STRING)
    Session.configure(bind=engine)
    metadata.create_all(engine)
    try:
        yield Session()
    except:
        pass
uber-common/opentracing-python-instrumentation
[ 164, 57, 164, 22, 1452851374 ]
def patch_sqlalchemy():
    mysqldb_hooks.install_patches()
    try:
        yield
    finally:
        mysqldb_hooks.reset_patches()
uber-common/opentracing-python-instrumentation
[ 164, 57, 164, 22, 1452851374 ]
def assert_span(span, operation, parent=None):
    assert span.operation_name == 'MySQLdb:' + operation
    assert span.tags.get(tags.SPAN_KIND) == tags.SPAN_KIND_RPC_CLIENT
    if parent:
        assert span.parent_id == parent.context.span_id
        assert span.context.trace_id == parent.context.trace_id
    else:
        assert span.parent_id is None
uber-common/opentracing-python-instrumentation
[ 164, 57, 164, 22, 1452851374 ]
def make_author():
    return Author(
        id=fake.random_int(),
        first_name=fake.first_name(),
        last_name=fake.last_name(),
        twitter=fake.domain_word(),
    )
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def make_comment(with_author=True):
    author = make_author() if with_author else None
    return Comment(id=fake.random_int(), body=fake.bs(), author=author)
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def author(): return make_author()
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def authors(): return [make_author() for _ in range(3)]
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def comments(): return [make_comment() for _ in range(3)]
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def post(): return make_post()
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
def post_with_null_comment(): return make_post(with_comments=False)
marshmallow-code/marshmallow-jsonapi
[ 218, 64, 218, 52, 1442088655 ]
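The post() and post_with_null_comment() fixtures call a make_post(with_comments=...) factory that these rows do not show; a sketch consistent with the make_author()/make_comment() factories above (the Post field names beyond author and comments are assumptions):

    def make_post(with_comments=True, with_author=True):
        # Hypothetical reconstruction in the style of the factories above.
        comments = [make_comment() for _ in range(2)] if with_comments else None
        return Post(
            id=fake.random_int(),
            title=fake.catch_phrase(),
            author=make_author() if with_author else None,
            comments=comments,
        )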