text
stringlengths 2
99.9k
| meta
dict |
---|---|
<?xml version="1.0" encoding="UTF-8"?>
<segment>
<name>PD1</name>
<description>Patient Additional Demographic</description>
<elements>
<field minOccurs="0" maxOccurs="0">
<name>PD1.1</name>
<description>Living Dependency</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.2</name>
<description>Living Arrangement</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.3</name>
<description>Patient Primary Facility</description>
<datatype>XON</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.4</name>
<description>Patient Primary Care Provider Name &amp; ID No.</description>
<datatype>XCN</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.5</name>
<description>Student Indicator</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.6</name>
<description>Handicap</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.7</name>
<description>Living Will Code</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.8</name>
<description>Organ Donor Code</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.9</name>
<description>Separate Bill</description>
<datatype>ID</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.10</name>
<description>Duplicate Patient</description>
<datatype>CX</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.11</name>
<description>Publicity Code</description>
<datatype>CE</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.12</name>
<description>Protection Indicator</description>
<datatype>ID</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.13</name>
<description>Protection Indicator Effective Date</description>
<datatype>DT</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.14</name>
<description>Place of Worship</description>
<datatype>XON</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.15</name>
<description>Advance Directive Code</description>
<datatype>CE</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.16</name>
<description>Immunization Registry Status</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.17</name>
<description>Immunization Registry Status Effective Date</description>
<datatype>DT</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.18</name>
<description>Publicity Code Effective Date</description>
<datatype>DT</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.19</name>
<description>Military Branch</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.20</name>
<description>Military Rank/Grade</description>
<datatype>IS</datatype>
</field>
<field minOccurs="0" maxOccurs="0">
<name>PD1.21</name>
<description>Military Status</description>
<datatype>IS</datatype>
</field>
</elements>
</segment>
| {
"pile_set_name": "Github"
} |
{
"fpsLimit": 60,
"preset": "basic",
"background": {
"color": "#0d47a1",
"image": "",
"position": "50% 50%",
"repeat": "no-repeat",
"size": "cover"
}
} | {
"pile_set_name": "Github"
} |
The two classes `KinesisRecorder` and `KinesisFirehoseRecorder` allow you to interface with Amazon Kinesis Data Streams and Amazon Kinesis Data Firehose to stream analytics data for real-time processing.
## What is Amazon Kinesis Data Streams?
[Amazon Kinesis Data Streams](http://aws.amazon.com/kinesis/) is a fully managed service for real-time processing of streaming data at massive scale. Amazon Kinesis can collect and process hundreds of terabytes of data per hour from hundreds of thousands of sources, so you can write applications that process information in real-time. With Amazon Kinesis applications, you can build real-time dashboards, capture exceptions and generate alerts, drive recommendations, and make other real-time business or operational decisions. You can also easily send data to other services such as Amazon Simple Storage Service, Amazon DynamoDB, and Amazon Redshift.
The Kinesis Data Streams `KinesisRecorder` client lets you store your Kinesis requests on disk and then send them all at once using the [PutRecords](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html) API call of Kinesis. This is useful because many mobile applications that use Kinesis Data Streams will create multiple requests per second. Sending an individual request under `PutRecord` action could adversely impact battery life. Moreover, the requests could be lost if the device goes offline. Thus, using the high-level Kinesis Data Streams client for batching can preserve both battery life and data.
## What is Amazon Kinesis Data Firehose?
[Amazon Kinesis Data Firehose](http://aws.amazon.com/kinesis/firehose/) is a fully managed service for delivering real-time streaming data to destinations such as Amazon Simple Storage Service (Amazon S3) and Amazon Redshift. With Kinesis Data Firehose, you do not need to write any applications or manage any resources. You configure your data producers to send data to Firehose and it automatically delivers the data to the destination that you specified.
The Amazon Kinesis Data Firehose `KinesisFirehoseRecorder` client lets you store your Kinesis Data Firehose requests on disk and then send them using the [PutRecordBatch](https://docs.aws.amazon.com/firehose/latest/APIReference/API_PutRecordBatch.html) API call of Kinesis Data Firehose.
For more information about Amazon Kinesis Data Firehose, see [Amazon Kinesis Data Firehose](http://docs.aws.amazon.com/firehose/latest/dev/what-is-this-service.html).
## Integrating Amazon Kinesis
Set up AWS Mobile SDK components by including the following libraries in your `app/build.gradle` dependencies list.
```groovy
dependencies {
implementation 'com.amazonaws:aws-android-sdk-kinesis:2.15.+'
implementation ('com.amazonaws:aws-android-sdk-mobile-client:2.15.+@aar') { transitive = true }
}
```
* `aws-android-sdk-kinesis` library enables sending analytics to Amazon Kinesis.
* `aws-android-sdk-mobile-client` library gives access to the AWS credentials provider and configurations.
Add the following imports to the main activity of your app.
```java
import com.amazonaws.mobileconnectors.kinesis.kinesisrecorder.*;
import com.amazonaws.mobile.client.AWSMobileClient;
import com.amazonaws.regions.Regions;
```
To use Kinesis Data Streams in an application, you must set the correct permissions. The following IAM policy allows the user to submit records to a specific data stream, which is identified by [ARN](http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html).
```json
{
"Statement": [{
"Effect": "Allow",
"Action": "kinesis:PutRecords",
"Resource": "arn:aws:kinesis:us-west-2:111122223333:stream/mystream"
}]
}
```
The following IAM policy allows the user to submit records to a specific Kinesis Data Firehose delivery stream.
```json
{
"Statement": [{
"Effect": "Allow",
"Action": "firehose:PutRecordBatch",
"Resource": "arn:aws:firehose:us-west-2:111122223333:deliverystream/mystream"
}]
}
```
This policy should be applied to roles assigned to the Amazon Cognito identity pool, but you need to replace the `Resource` value with the correct ARN for your Amazon Kinesis or Amazon Kinesis Data Firehose stream. You can apply policies at the [IAM console](https://console.aws.amazon.com/iam/). To learn more about IAM policies, see [Using IAM](http://docs.aws.amazon.com/IAM/latest/UserGuide/IAM_Introduction.html).
To learn more about Amazon Kinesis Data Streams policies, see [Controlling Access to Amazon Kinesis Data Streams Resources with IAM](http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-using-iam.html).
To learn more about Amazon Kinesis Data Firehose policies, see [Controlling Access with Amazon Kinesis Data Firehose](http://docs.aws.amazon.com/firehose/latest/dev/controlling-access.html).
## Working with the API
You can use `AWSMobileClient` to setup the Cognito credentials that are required to authenticate your requests with Amazon Kinesis.
```java
AWSMobileClient.getInstance().initialize(getApplicationContext(), new Callback<UserStateDetails>() {
@Override
public void onResult(UserStateDetails userStateDetails) {
Log.i("INIT", userStateDetails.getUserState().toString());
}
@Override
public void onError(Exception e) {
Log.e("INIT", "Initialization error.", e);
}
}
);
```
Once you have credentials, you can use `KinesisRecorder` with Amazon Kinesis. The following snippet creates a directory and instantiates the `KinesisRecorder` client:
```java
String kinesisDirectory = "YOUR_UNIQUE_DIRECTORY";
KinesisRecorder recorder = new KinesisRecorder(
myActivity.getDir(kinesisDirectory, 0),
Regions.<YOUR-AWS-REGION>,
AWSMobileClient.getInstance()
);
// KinesisRecorder uses synchronous calls, so you shouldn't call KinesisRecorder methods on the main thread.
```
To use `KinesisFirehoseRecorder`, you need to pass the object in a directory where streaming data is saved. We recommend you use an app private directory because the data is not encrypted.
```java
KinesisFirehoseRecorder firehoseRecorder = new KinesisFirehoseRecorder(
context.getCacheDir(),
Regions.<YOUR-AWS-REGION>,
AWSMobileClient.getInstance());
```
Configure Kinesis:
You can configure `KinesisRecorder` or `KinesisFirehoseRecorder` through their properties:
You can configure the maximum allowed storage via the `withMaxStorageSize()` method of `KinesisRecorderConfig`.
You can retrieve the same information by getting the `KinesisRecorderConfig` object for the recorder and calling `getMaxStorageSize()`:
```java
KinesisRecorderConfig kinesisRecorderConfig = recorder.getKinesisRecorderConfig();
Long maxStorageSize = kinesisRecorderConfig.getMaxStorageSize();
// Do something with maxStorageSize
```
To check the number of bytes currently stored in the directory passed in to the `KinesisRecorder` constructor, call `getDiskBytesUsed()`:
```java
Long bytesUsed = recorder.getDiskBytesUsed();
// Do something with bytesUsed
```
To see how much space the `KinesisRecorder` client is allowed to use, you can call `getDiskByteLimit()`.
```java
Long byteLimit = recorder.getDiskByteLimit();
// Do something with byteLimit
```
With `KinesisRecorder` created and configured, you can use `saveRecord()` to save records and then send them in a batch.
```java
recorder.saveRecord(
"MyData".getBytes(),
"MyStreamName");
recorder.submitAllRecords();
```
For the `saveRecord()` request above to work, you would have to have created a stream named `MyStreamName`. You can create new streams in the [Amazon Kinesis console](https://console.aws.amazon.com/kinesis).
If `submitAllRecords()` is called while the app is online, requests will be sent and removed from the disk. If `submitAllRecords()` is called while the app is offline, requests will be kept on disk until `submitAllRecords()` is called while online. This applies even if you lose your internet connection midway through a submit. So if you save ten requests, call `submitAllRecords()`, send five, and then lose the Internet connection, you have five requests left on disk. These remaining five will be sent the next time `submitAllRecords()` is invoked online.
Here is a similar snippet for Amazon Kinesis Data Firehose:
```java
// Start to save data, either a String or a byte array
firehoseRecorder.saveRecord("Hello world!\n");
firehoseRecorder.saveRecord("Streaming data to Amazon S3 via Amazon Kinesis Data Firehose is easy.\n");
// Send previously saved data to Amazon Kinesis Data Firehose
// Note: submitAllRecords() makes network calls, so wrap it in an AsyncTask.
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... v) {
try {
firehoseRecorder.submitAllRecords();
} catch (AmazonClientException ace) {
// handle error
}
return null;
}
}.execute();
```
To learn more about working with Kinesis Data Streams, see the [Amazon Kinesis Data Streams resources](http://aws.amazon.com/kinesis/developer-resources/).
To learn more about the Kinesis Data Streams classes, see the [class reference for KinesisRecorder](https://aws-amplify.github.io/aws-sdk-android/docs/reference/com/amazonaws/mobileconnectors/kinesis/kinesisrecorder/KinesisRecorder.html).
To learn more about the Kinesis Data Firehose classes, see the [class reference for KinesisFirehoseRecorder](https://aws-amplify.github.io/aws-sdk-android/docs/reference/com/amazonaws/mobileconnectors/kinesis/kinesisrecorder/KinesisFirehoseRecorder.html).
| {
"pile_set_name": "Github"
} |
///
/// Copyright (c) 2016 Dropbox, Inc. All rights reserved.
///
/// Auto-generated by Stone, do not modify.
///
#import <Foundation/Foundation.h>
#import "DBSerializableProtocol.h"
@class DBTEAMPOLICIESSharedFolderJoinPolicy;
NS_ASSUME_NONNULL_BEGIN
#pragma mark - API Object
///
/// The `SharedFolderJoinPolicy` union.
///
/// Policy governing which shared folders a team member can join.
///
/// This class implements the `DBSerializable` protocol (serialize and
/// deserialize instance methods), which is required for all Obj-C SDK API route
/// objects.
///
@interface DBTEAMPOLICIESSharedFolderJoinPolicy : NSObject <DBSerializable, NSCopying>
#pragma mark - Instance fields
/// The `DBTEAMPOLICIESSharedFolderJoinPolicyTag` enum type represents the
/// possible tag states with which the `DBTEAMPOLICIESSharedFolderJoinPolicy`
/// union can exist.
typedef NS_CLOSED_ENUM(NSInteger, DBTEAMPOLICIESSharedFolderJoinPolicyTag){
/// Team members can only join folders shared by teammates.
DBTEAMPOLICIESSharedFolderJoinPolicyFromTeamOnly,
/// Team members can join any shared folder, including those shared by users
/// outside the team.
DBTEAMPOLICIESSharedFolderJoinPolicyFromAnyone,
/// (no description).
DBTEAMPOLICIESSharedFolderJoinPolicyOther,
};
/// Represents the union's current tag state.
@property (nonatomic, readonly) DBTEAMPOLICIESSharedFolderJoinPolicyTag tag;
#pragma mark - Constructors
///
/// Initializes union class with tag state of "from_team_only".
///
/// Description of the "from_team_only" tag state: Team members can only join
/// folders shared by teammates.
///
/// @return An initialized instance.
///
- (instancetype)initWithFromTeamOnly;
///
/// Initializes union class with tag state of "from_anyone".
///
/// Description of the "from_anyone" tag state: Team members can join any shared
/// folder, including those shared by users outside the team.
///
/// @return An initialized instance.
///
- (instancetype)initWithFromAnyone;
///
/// Initializes union class with tag state of "other".
///
/// @return An initialized instance.
///
- (instancetype)initWithOther;
- (instancetype)init NS_UNAVAILABLE;
#pragma mark - Tag state methods
///
/// Retrieves whether the union's current tag state has value "from_team_only".
///
/// @return Whether the union's current tag state has value "from_team_only".
///
- (BOOL)isFromTeamOnly;
///
/// Retrieves whether the union's current tag state has value "from_anyone".
///
/// @return Whether the union's current tag state has value "from_anyone".
///
- (BOOL)isFromAnyone;
///
/// Retrieves whether the union's current tag state has value "other".
///
/// @return Whether the union's current tag state has value "other".
///
- (BOOL)isOther;
///
/// Retrieves string value of union's current tag state.
///
/// @return A human-readable string representing the union's current tag state.
///
- (NSString *)tagName;
@end
#pragma mark - Serializer Object
///
/// The serialization class for the `DBTEAMPOLICIESSharedFolderJoinPolicy`
/// union.
///
@interface DBTEAMPOLICIESSharedFolderJoinPolicySerializer : NSObject
///
/// Serializes `DBTEAMPOLICIESSharedFolderJoinPolicy` instances.
///
/// @param instance An instance of the `DBTEAMPOLICIESSharedFolderJoinPolicy`
/// API object.
///
/// @return A json-compatible dictionary representation of the
/// `DBTEAMPOLICIESSharedFolderJoinPolicy` API object.
///
+ (nullable NSDictionary<NSString *, id> *)serialize:(DBTEAMPOLICIESSharedFolderJoinPolicy *)instance;
///
/// Deserializes `DBTEAMPOLICIESSharedFolderJoinPolicy` instances.
///
/// @param dict A json-compatible dictionary representation of the
/// `DBTEAMPOLICIESSharedFolderJoinPolicy` API object.
///
/// @return An instantiation of the `DBTEAMPOLICIESSharedFolderJoinPolicy`
/// object.
///
+ (DBTEAMPOLICIESSharedFolderJoinPolicy *)deserialize:(NSDictionary<NSString *, id> *)dict;
@end
NS_ASSUME_NONNULL_END
| {
"pile_set_name": "Github"
} |
/***********************************************************************
!!!!!! DO NOT MODIFY !!!!!!
GacGen.exe Resource.xml
This file is generated by Workflow compiler
https://github.com/vczh-libraries
***********************************************************************/
#ifndef VCZH_WORKFLOW_COMPILER_GENERATED_DEMOREFLECTION
#define VCZH_WORKFLOW_COMPILER_GENERATED_DEMOREFLECTION
#include "Demo.h"
#ifndef VCZH_DEBUG_NO_REFLECTION
#include "GacUIReflection.h"
#endif
#if defined( _MSC_VER)
#pragma warning(push)
#pragma warning(disable:4250)
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wparentheses-equality"
#elif defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wparentheses-equality"
#endif
/***********************************************************************
Reflection
***********************************************************************/
namespace vl
{
namespace reflection
{
namespace description
{
#ifndef VCZH_DEBUG_NO_REFLECTION
DECL_TYPE_INFO(::demo::MainWindow)
DECL_TYPE_INFO(::demo::MainWindowConstructor)
#endif
extern bool LoadDemoTypes();
}
}
}
#if defined( _MSC_VER)
#pragma warning(pop)
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#elif defined(__clang__)
#pragma clang diagnostic pop
#endif
#endif
| {
"pile_set_name": "Github"
} |
abcdef abc def hij
klm nop qrs
abcdef abc def hij
tuv wxy z
| {
"pile_set_name": "Github"
} |
# Homebrew cask installing the "Cormorant SC" font family (OFL-licensed)
# directly from the Google Fonts repository via an SVN checkout.
cask "font-cormorant-sc" do
# Tracks the repository head, so there is no fixed version or checksum.
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/trunk/ofl/cormorantsc",
using: :svn,
trust_cert: true
name "Cormorant SC"
homepage "https://fonts.google.com/specimen/Cormorant+SC"
# Individual font weights installed by this cask.
font "CormorantSC-Bold.ttf"
font "CormorantSC-Light.ttf"
font "CormorantSC-Medium.ttf"
font "CormorantSC-Regular.ttf"
font "CormorantSC-SemiBold.ttf"
end
| {
"pile_set_name": "Github"
} |
@comment $NetBSD: PLIST,v 1.5 2017/06/21 08:28:43 markd Exp $
share/texmf-dist/scripts/luaotfload/luaotfload-tool.lua
share/texmf-dist/scripts/luaotfload/mkcharacters
share/texmf-dist/scripts/luaotfload/mkglyphlist
share/texmf-dist/scripts/luaotfload/mkimport
share/texmf-dist/scripts/luaotfload/mkstatus
share/texmf-dist/scripts/luaotfload/mktests
share/texmf-dist/tex/luatex/luaotfload/fontloader-2017-02-11.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-basics-gen.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-basics-nod.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-basics.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-data-con.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-afk.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-cff.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-cid.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-con.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-def.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-dsp.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-gbn.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-ini.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-lua.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-map.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-ocl.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-one.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-onr.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-osd.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-ota.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-otc.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-oti.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-otj.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-otl.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-oto.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-otr.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-ots.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-oup.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-tfm.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-font-ttf.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-demo-vf-1.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-enc.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-ext.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts-syn.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-fonts.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-boolean.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-file.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-function.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-io.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-lpeg.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-lua.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-math.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-string.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-l-table.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-languages.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-languages.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-math.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-math.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-mplib.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-plain.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor-test.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-preprocessor.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-reference.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib-test.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-swiglib.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-test.tex
share/texmf-dist/tex/luatex/luaotfload/fontloader-util-fil.lua
share/texmf-dist/tex/luatex/luaotfload/fontloader-util-str.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-blacklist.cnf
share/texmf-dist/tex/luatex/luaotfload/luaotfload-characters.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-configuration.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-diagnostics.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-glyphlist.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-init.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-letterspace.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-log.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-main.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-parsers.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-resolvers.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload-status.lua
share/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
| {
"pile_set_name": "Github"
} |
--- sandbox/linux/BUILD.gn.orig 2019-04-08 08:18:26 UTC
+++ sandbox/linux/BUILD.gn
@@ -12,12 +12,12 @@ if (is_android) {
}
declare_args() {
- compile_suid_client = is_linux
+ compile_suid_client = is_linux && !is_bsd
- compile_credentials = is_linux
+ compile_credentials = is_linux && !is_bsd
# On Android, use plain GTest.
- use_base_test_suite = is_linux
+ use_base_test_suite = is_linux && !is_bsd
}
if (is_nacl_nonsfi) {
@@ -379,7 +379,7 @@ component("sandbox_services") {
public_deps += [ ":sandbox_services_headers" ]
}
- if (is_nacl_nonsfi) {
+ if (is_nacl_nonsfi || is_bsd) {
cflags = [ "-fgnu-inline-asm" ]
sources -= [
@@ -387,6 +387,8 @@ component("sandbox_services") {
"services/init_process_reaper.h",
"services/scoped_process.cc",
"services/scoped_process.h",
+ "services/syscall_wrappers.cc",
+ "services/syscall_wrappers.h",
"services/yama.cc",
"services/yama.h",
"syscall_broker/broker_channel.cc",
@@ -405,6 +407,10 @@ component("sandbox_services") {
"syscall_broker/broker_process.h",
"syscall_broker/broker_simple_message.cc",
"syscall_broker/broker_simple_message.h",
+ ]
+ sources += [
+ "services/libc_interceptor.cc",
+ "services/libc_interceptor.h",
]
} else if (!is_android) {
sources += [
| {
"pile_set_name": "Github"
} |
--recursive
--require @babel/register
| {
"pile_set_name": "Github"
} |
<HTML><HEAD>
<TITLE>Invalid URL</TITLE>
</HEAD><BODY>
<H1>Invalid URL</H1>
The requested URL "[no URL]", is invalid.<p>
Reference #9.44952317.1507271057.135fad8
</BODY></HTML>
| {
"pile_set_name": "Github"
} |
// Phaser game configuration: auto-select the WebGL or Canvas renderer,
// mount the 800x600 canvas into the #phaser-example element, and register
// the create() function below as the scene's only lifecycle hook.
var config = {
type: Phaser.AUTO,
parent: 'phaser-example',
width: 800,
height: 600,
scene: {
create: create
},
};
// Boot the game with the configuration above.
var game = new Phaser.Game(config);
// Scene "create" hook: draws three stars with 4, 5 and 6 spikes across
// the canvas using the drawStar() helper defined below.
function create() {
var graphics = this.add.graphics();
// Arguments: (graphics, cx, cy, spikes, outerRadius, innerRadius, fillColor, lineColor)
drawStar(graphics, 100, 300, 4, 50, 50 / 2, 0xffff00, 0xff0000);
drawStar(graphics, 400, 300, 5, 100, 100 / 2, 0xffff00, 0xff0000);
drawStar(graphics, 700, 300, 6, 50, 50 / 2, 0xffff00, 0xff0000);
}
/**
 * Draws a filled and outlined star onto a Phaser Graphics object.
 *
 * @param {object} graphics - Phaser Graphics object to draw with.
 * @param {number} cx - Center x coordinate of the star.
 * @param {number} cy - Center y coordinate of the star.
 * @param {number} spikes - Number of points on the star.
 * @param {number} outerRadius - Distance from center to each point tip.
 * @param {number} innerRadius - Distance from center to each inner notch.
 * @param {number} color - Fill color (e.g. 0xffff00).
 * @param {number} lineColor - Outline color (e.g. 0xff0000).
 */
function drawStar (graphics, cx, cy, spikes, outerRadius, innerRadius, color, lineColor)
{
    // Start pointing straight up (12 o'clock) and advance half a spike per step.
    var rot = Math.PI / 2 * 3;
    var x = cx;
    var y = cy;
    var step = Math.PI / spikes;

    graphics.lineStyle(4, lineColor, 1);
    graphics.fillStyle(color, 1);

    graphics.beginPath();
    graphics.moveTo(cx, cy - outerRadius);

    // Bug fix: the original used an undeclared `i`, leaking a global variable
    // in sloppy mode (and throwing a ReferenceError in strict mode).
    for (var i = 0; i < spikes; i++)
    {
        // Outer vertex (point tip).
        x = cx + Math.cos(rot) * outerRadius;
        y = cy + Math.sin(rot) * outerRadius;
        graphics.lineTo(x, y);
        rot += step;

        // Inner vertex (notch between points).
        x = cx + Math.cos(rot) * innerRadius;
        y = cy + Math.sin(rot) * innerRadius;
        graphics.lineTo(x, y);
        rot += step;
    }

    // Close the outline back at the starting point, then fill and stroke.
    graphics.lineTo(cx, cy - outerRadius);
    graphics.closePath();
    graphics.fillPath();
    graphics.strokePath();
}
| {
"pile_set_name": "Github"
} |
.theme-dusk,.theme-midnight {
.hljs {
display: block;
overflow-x: auto;
background: #232323;
color: #e6e1dc;
}
.hljs-comment,
.hljs-quote {
color: #bc9458;
font-style: italic;
}
.hljs-keyword,
.hljs-selector-tag {
color: #c26230;
}
.hljs-string,
.hljs-number,
.hljs-regexp,
.hljs-variable,
.hljs-template-variable {
color: #a5c261;
}
.hljs-subst {
color: #519f50;
}
.hljs-tag,
.hljs-name {
color: #e8bf6a;
}
.hljs-type {
color: #da4939;
}
.hljs-symbol,
.hljs-bullet,
.hljs-built_in,
.hljs-builtin-name,
.hljs-attr,
.hljs-link {
color: #6d9cbe;
}
.hljs-params {
color: #d0d0ff;
}
.hljs-attribute {
color: #cda869;
}
.hljs-meta {
color: #9b859d;
}
.hljs-title,
.hljs-section {
color: #ffc66d;
}
.hljs-addition {
background-color: #144212;
color: #e6e1dc;
display: inline-block;
width: 100%;
}
.hljs-deletion {
background-color: #600;
color: #e6e1dc;
display: inline-block;
width: 100%;
}
.hljs-selector-class {
color: #9b703f;
}
.hljs-selector-id {
color: #8b98ab;
}
.hljs-emphasis {
font-style: italic;
}
.hljs-strong {
font-weight: bold;
}
.hljs-link {
text-decoration: underline;
}
} | {
"pile_set_name": "Github"
} |
#ifndef CREATE_EMPTY_DIRECTED_GRAPH_WITH_GRAPH_NAME_H
#define CREATE_EMPTY_DIRECTED_GRAPH_WITH_GRAPH_NAME_H
#include <boost/graph/adjacency_list.hpp>
// Creates an empty directed boost::adjacency_list whose only graph-level
// property is a std::string name (boost::graph_name_t). Vertices and edges
// carry no properties; both are stored in std::vector containers (vecS).
boost::adjacency_list<boost::vecS, boost::vecS, boost::directedS,
boost::no_property, boost::no_property,
boost::property<boost::graph_name_t, std::string>>
create_empty_directed_graph_with_graph_name() noexcept;
#endif // CREATE_EMPTY_DIRECTED_GRAPH_WITH_GRAPH_NAME_H
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.stanbol.entityhub.web.reader;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.rdf.core.serializedform.Parser;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.apache.clerezza.rdf.core.serializedform.UnsupportedParsingFormatException;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
import org.apache.stanbol.entityhub.jersey.utils.JerseyUtils;
import org.apache.stanbol.entityhub.jersey.utils.MessageBodyReaderUtils;
import org.apache.stanbol.entityhub.jersey.utils.MessageBodyReaderUtils.RequestData;
import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
import org.apache.stanbol.entityhub.servicesapi.model.Representation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides support for reading Representation form Requests. This implementation
* supports all RDF supports as well as {@link MediaType#APPLICATION_FORM_URLENCODED}
* - in case the data are sent from an HTML form - and
* {@link MediaType#MULTIPART_FORM_DATA} - mime encoded data.
* In case of an HTML form the encoding need to be specified by the parameter
* "encoding" for the entity data the parameters "entity" or "content" can be
* used.
* @author Rupert Westenthaler
*
*/
@Component
@Service(Object.class)
@Property(name="javax.ws.rs", boolValue=true)
@Provider
@Consumes({ //First the data types directly supported for parsing representations
MediaType.APPLICATION_JSON, SupportedFormat.N3, SupportedFormat.N_TRIPLE,
SupportedFormat.RDF_XML, SupportedFormat.TURTLE, SupportedFormat.X_TURTLE,
SupportedFormat.RDF_JSON,
//finally this also supports sending the data as form and mime multipart
MediaType.APPLICATION_FORM_URLENCODED,
MediaType.MULTIPART_FORM_DATA})
public class RepresentationReader implements MessageBodyReader<Map<String,Representation>> {
// SLF4J logger for this reader.
private static final Logger log = LoggerFactory.getLogger(RepresentationReader.class);
// Lower-cased media types (JSON plus the RDF serialisations) that this
// reader can parse directly as Representation data. Populated once below.
public static final Set<String> supportedMediaTypes;
// Fallback media type used when a request does not specify one.
private static final MediaType DEFAULT_ACCEPTED_MEDIA_TYPE = MediaType.TEXT_PLAIN_TYPE;
static {
Set<String> types = new HashSet<String>();
//ensure everything is lower case
types.add(MediaType.APPLICATION_JSON.toLowerCase());
types.add(SupportedFormat.N3.toLowerCase());
types.add(SupportedFormat.N_TRIPLE.toLowerCase());
types.add(SupportedFormat.RDF_JSON.toLowerCase());
types.add(SupportedFormat.RDF_XML.toLowerCase());
types.add(SupportedFormat.TURTLE.toLowerCase());
types.add(SupportedFormat.X_TURTLE.toLowerCase());
supportedMediaTypes = Collections.unmodifiableSet(types);
}
// Clerezza RDF parser service, injected by the OSGi runtime (Felix SCR).
@Reference
private Parser parser;
@Override
public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
String mediaTypeWithoutParameter =
mediaType.getType().toLowerCase()+'/'+
mediaType.getSubtype().toLowerCase();
log.debug("isreadable: [genericType: {}| mediaType {}]",
genericType,mediaTypeWithoutParameter);
//second the media type
boolean mediaTypeOK = (//the MimeTypes of Representations
supportedMediaTypes.contains(mediaTypeWithoutParameter) ||
//as well as URL encoded
MediaType.APPLICATION_FORM_URLENCODED.equals(mediaTypeWithoutParameter) ||
//and mime multipart
MediaType.MULTIPART_FORM_DATA.equals(mediaTypeWithoutParameter));
boolean typeOk = JerseyUtils.testParameterizedType(Map.class,
new Class[]{String.class,Representation.class}, genericType);
log.debug("type is {} for {} against Map<String,Representation>",
typeOk ? "compatible" : "incompatible" ,genericType);
return typeOk && mediaTypeOK;
}
@Override
public Map<String,Representation> readFrom(Class<Map<String,Representation>> type,
Type genericType,
Annotation[] annotations,
MediaType mediaType,
MultivaluedMap<String,String> httpHeaders,
InputStream entityStream) throws IOException, WebApplicationException {
log.info("Read Representations from Request Data");
long start = System.currentTimeMillis();
//(1) get the charset and the acceptedMediaType
String charset = "UTF-8";
if(mediaType.getParameters().containsKey("charset")){
charset = mediaType.getParameters().get("charset");
}
MediaType acceptedMediaType = getAcceptedMediaType(httpHeaders);
log.info("readFrom: mediaType {} | accepted {} | charset {}",
new Object[]{mediaType,acceptedMediaType,charset});
// (2) read the Content from the request (this needs to deal with
// MediaType.APPLICATION_FORM_URLENCODED_TYPE and
// MediaType.MULTIPART_FORM_DATA_TYPE requests!
RequestData content;
if(mediaType.isCompatible(MediaType.APPLICATION_FORM_URLENCODED_TYPE)) {
try {
content = MessageBodyReaderUtils.formForm(entityStream, charset,
"encoding",Arrays.asList("entity","content"));
} catch (IllegalArgumentException e) {
log.info("Bad Request: {}",e);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).entity(e.toString()).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
if(content.getMediaType() == null){
String message = String.format(
"Missing parameter %s used to specify the media type" +
"(supported values: %s",
"encoding",supportedMediaTypes);
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
if(!isSupported(content.getMediaType())){
String message = String.format(
"Unsupported Content-Type specified by parameter " +
"encoding=%s (supported: %s)",
content.getMediaType().toString(),supportedMediaTypes);
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).
entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
} else if(mediaType.isCompatible(MediaType.MULTIPART_FORM_DATA_TYPE)){
log.info("read from MimeMultipart");
List<RequestData> contents;
try {
contents = MessageBodyReaderUtils.fromMultipart(entityStream, mediaType);
} catch (IllegalArgumentException e) {
log.info("Bad Request: {}",e.toString());
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).entity(e.toString()).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
if(contents.isEmpty()){
String message = "Request does not contain any Mime BodyParts.";
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
} else if(contents.size()>1){
//print warnings about ignored parts
log.warn("{} Request contains more than one Parts: others than " +
"the first will be ignored",
MediaType.MULTIPART_FORM_DATA_TYPE);
for(int i=1;i<contents.size();i++){
RequestData ignored = contents.get(i);
log.warn(" ignore Content {}: Name {}| MediaType {}",
new Object[] {i+1,ignored.getName(),ignored.getMediaType()});
}
}
content = contents.get(0);
if(content.getMediaType() == null){
String message = String.format(
"MediaType not specified for mime body part for file %s. " +
"The media type must be one of the supported values: %s",
content.getName(), supportedMediaTypes);
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
if(!isSupported(content.getMediaType())){
String message = String.format(
"Unsupported Content-Type %s specified for mime body part " +
"for file %s (supported: %s)",
content.getMediaType(),content.getName(),supportedMediaTypes);
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).
entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
} else {
content = new RequestData(mediaType, null, entityStream);
}
long readingCompleted = System.currentTimeMillis();
log.info(" ... reading request data {}ms",readingCompleted-start);
Map<String,Representation> parsed = parseFromContent(content,acceptedMediaType);
long parsingCompleted = System.currentTimeMillis();
log.info(" ... parsing data {}ms",parsingCompleted-readingCompleted);
return parsed;
}
public Map<String,Representation> parseFromContent(RequestData content, MediaType acceptedMediaType){
// (3) Parse the Representtion(s) form the entity stream
if(content.getMediaType().isCompatible(MediaType.APPLICATION_JSON_TYPE)){
//parse from json
throw new UnsupportedOperationException("Parsing of JSON not yet implemented :(");
} else if(isSupported(content.getMediaType())){ //from RDF serialisation
RdfValueFactory valueFactory = RdfValueFactory.getInstance();
Map<String,Representation> representations = new HashMap<String,Representation>();
Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
Graph graph = new IndexedGraph();
try {
parser.parse(graph,content.getEntityStream(), content.getMediaType().toString());
} catch (UnsupportedParsingFormatException e) {
//String acceptedMediaType = httpHeaders.getFirst("Accept");
//throw an internal server Error, because we check in
//isReadable(..) for supported types and still we get here a
//unsupported format -> therefore it looks like an configuration
//error the server (e.g. a missing Bundle with the required bundle)
String message = "Unable to create the Parser for the supported format"
+content.getMediaType()+" ("+e+")";
log.error(message,e);
throw new WebApplicationException(
Response.status(Status.INTERNAL_SERVER_ERROR).
entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
} catch (RuntimeException e){
//NOTE: Clerezza seams not to provide specific exceptions on
// parsing errors. Hence the catch for all RuntimeException
String message = "Unable to parse the provided RDF data (format: "
+content.getMediaType()+", message: "+e.getMessage()+")";
log.error(message,e);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).
entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
for(Iterator<Triple> st = graph.iterator();st.hasNext();){
BlankNodeOrIRI resource = st.next().getSubject();
if(resource instanceof IRI && processed.add(resource)){
//build a new representation
representations.put(((IRI)resource).getUnicodeString(),
valueFactory.createRdfRepresentation((IRI)resource, graph));
}
}
return representations;
} else { //unsupported media type
String message = String.format(
"Parsed Content-Type '%s' is not one of the supported %s",
content.getMediaType(),supportedMediaTypes);
log.info("Bad Request: {}",message);
throw new WebApplicationException(
Response.status(Status.BAD_REQUEST).
entity(message).
header(HttpHeaders.ACCEPT, acceptedMediaType).build());
}
}
/**
* Internally used to get the accepted media type used when returning
* {@link WebApplicationException}s.
* @param httpHeaders
* @param acceptedMediaType
* @return
*/
private static MediaType getAcceptedMediaType(MultivaluedMap<String,String> httpHeaders) {
MediaType acceptedMediaType;
String acceptedMediaTypeString = httpHeaders.getFirst("Accept");
if(acceptedMediaTypeString != null){
try {
acceptedMediaType = MediaType.valueOf(acceptedMediaTypeString);
if(acceptedMediaType.isWildcardType()){
acceptedMediaType = DEFAULT_ACCEPTED_MEDIA_TYPE;
}
} catch (IllegalArgumentException e) {
acceptedMediaType = DEFAULT_ACCEPTED_MEDIA_TYPE;
}
} else {
acceptedMediaType = DEFAULT_ACCEPTED_MEDIA_TYPE;
}
return acceptedMediaType;
}
/**
* Converts the type and the subtype of the parsed media type to the
* string representation as stored in {@link #supportedMediaTypes} and than
* checks if the parsed media type is contained in this list.
* @param mediaType the MediaType instance to check
* @return <code>true</code> if the parsed media type is not
* <code>null</code> and supported.
*/
private boolean isSupported(MediaType mediaType){
return mediaType == null ? false : supportedMediaTypes.contains(
mediaType.getType().toLowerCase()+'/'+
mediaType.getSubtype().toLowerCase());
}
}
| {
"pile_set_name": "Github"
} |
///
/// Massively by HTML5 UP
/// html5up.net | @ajlkn
/// Free for personal and commercial use under the CCA 3.0 license (html5up.net/license)
///
/* Wrapper */

// Page wrapper: hosts the layered background and the load-time fade-in.
#wrapper {
	// opacity transition length is driven by the theme's menu duration setting
	@include vendor('transition', 'opacity #{_duration(menu)} ease');
	position: relative;
	z-index: 1;
	overflow: hidden;

	// Background layer: overlay texture, darkening gradient, then the photo.
	> .bg {
		position: absolute;
		top: 0;
		left: 0;
		width: 100%;
		height: 100%;
		background-color: _palette(wrapper-bg);
		// three stacked layers, listed front-to-back
		background-image: url('../../images/overlay.png'), linear-gradient(0deg, rgba(0,0,0,0.1), rgba(0,0,0,0.1)), url('../../images/bg.jpg');
		background-size: auto, auto, 100% auto;
		background-position: center, center, top center;
		background-repeat: repeat, no-repeat, no-repeat;
		background-attachment: scroll, scroll, scroll;
		z-index: -1;

		// Variant pinned to the viewport instead of the document.
		&.fixed {
			position: fixed;
			width: 100vw;
			height: 100vh;
		}
	}

	// Fade-in: a fixed overlay that starts opaque while the body carries
	// .is-loading and fades out (after a short delay) once loading finishes.
	&.fade-in {
		&:before {
			@include vendor('pointer-events', 'none');
			@include vendor('transition', 'opacity 1s ease-in-out');
			@include vendor('transition-delay', '0.75s');
			background: _palette(invert, bg);
			content: '';
			display: block;
			height: 100%;
			left: 0;
			opacity: 0;
			position: fixed;
			top: 0;
			width: 100%;
		}

		body.is-loading & {
			&:before {
				opacity: 1;
			}
		}
	}

	// Portrait screens: scale the photo layer by height instead of width.
	@include orientation(portrait) {
		> .bg {
			background-size: auto, auto, auto 175%;
		}
	}
}
"pile_set_name": "Github"
} |
# frozen_string_literal: true
require File.expand_path('lib/jekyll-last-modified-at/version.rb', __dir__)
Gem::Specification.new do |spec|
  spec.name     = 'jekyll-last-modified-at'
  spec.version  = Jekyll::LastModifiedAt::VERSION
  spec.summary  = 'A liquid tag for Jekyll to indicate the last time a file was modified.'
  spec.authors  = 'Garen J. Torikian'
  spec.homepage = 'https://github.com/gjtorikian/jekyll-last-modified-at'
  spec.license  = 'MIT'

  # Ship only the library sources.
  spec.files = Dir['lib/**/*.rb']

  # Runtime dependencies.
  # NOTE(review): the leading space in ' < 5.0' is tolerated by Gem::Requirement
  # parsing, but looks unintentional — confirm before normalising.
  spec.add_dependency 'jekyll', '>= 3.7', ' < 5.0'
  spec.add_dependency 'posix-spawn', '~> 0.3.9'

  # Development-only dependencies.
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'rubocop'
  spec.add_development_dependency 'rubocop-performance'
  spec.add_development_dependency 'rubocop-standard'
  spec.add_development_dependency 'spork'
end
| {
"pile_set_name": "Github"
} |
#if !defined(BOOST_PP_IS_ITERATING)

///// header body

#ifndef BOOST_MPL_AUX778076_ADVANCE_BACKWARD_HPP_INCLUDED
#define BOOST_MPL_AUX778076_ADVANCE_BACKWARD_HPP_INCLUDED

// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.

// $Id$
// $Date$
// $Revision$

#if !defined(BOOST_MPL_PREPROCESSING_MODE)
#   include <boost/mpl/prior.hpp>
#   include <boost/mpl/apply_wrap.hpp>
#endif

#include <boost/mpl/aux_/config/use_preprocessed.hpp>

#if !defined(BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS) \
    && !defined(BOOST_MPL_PREPROCESSING_MODE)

#   define BOOST_MPL_PREPROCESSED_HEADER advance_backward.hpp
#   include <boost/mpl/aux_/include_preprocessed.hpp>

#else

#   include <boost/mpl/limits/unrolling.hpp>
#   include <boost/mpl/aux_/nttp_decl.hpp>
#   include <boost/mpl/aux_/config/eti.hpp>

#   include <boost/preprocessor/iterate.hpp>
#   include <boost/preprocessor/cat.hpp>
#   include <boost/preprocessor/inc.hpp>

namespace boost { namespace mpl { namespace aux {

// advance_backward<N>::apply<Iter>::type moves Iter N positions backwards by
// applying prior<> N times. Specializations for N in
// [0, BOOST_MPL_LIMIT_UNROLLING] are generated below by preprocessor
// self-iteration of this header (see BOOST_PP_ITERATE).

// forward declaration
template< BOOST_MPL_AUX_NTTP_DECL(long, N) > struct advance_backward;

#   define BOOST_PP_ITERATION_PARAMS_1 \
    (3,(0, BOOST_MPL_LIMIT_UNROLLING, <boost/mpl/aux_/advance_backward.hpp>))
#   include BOOST_PP_ITERATE()

// implementation for N that exceeds BOOST_MPL_LIMIT_UNROLLING
// (advances in chunks of BOOST_MPL_LIMIT_UNROLLING steps, recursing on the
// remainder until it falls into the unrolled range above)
template< BOOST_MPL_AUX_NTTP_DECL(long, N) >
struct advance_backward
{
    template< typename Iterator > struct apply
    {
        typedef typename apply_wrap1<
              advance_backward<BOOST_MPL_LIMIT_UNROLLING>
            , Iterator
            >::type chunk_result_;

        typedef typename apply_wrap1<
              advance_backward<(
                (N - BOOST_MPL_LIMIT_UNROLLING) < 0
                    ? 0
                    : N - BOOST_MPL_LIMIT_UNROLLING
                )>
            , chunk_result_
            >::type type;
    };
};

}}}

#endif // BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS
#endif // BOOST_MPL_AUX778076_ADVANCE_BACKWARD_HPP_INCLUDED

///// iteration, depth == 1

// For gcc 4.4 compatability, we must include the
// BOOST_PP_ITERATION_DEPTH test inside an #else clause.
#else // BOOST_PP_IS_ITERATING
#if BOOST_PP_ITERATION_DEPTH() == 1
#define i_ BOOST_PP_FRAME_ITERATION(1)

// Emits advance_backward<i_> with an apply<> that chains i_ prior<> typedefs
// (generated by the depth-2 iteration below).
template<>
struct advance_backward< BOOST_PP_FRAME_ITERATION(1) >
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;

#if i_ > 0
#   define BOOST_PP_ITERATION_PARAMS_2 \
    (3,(1, BOOST_PP_FRAME_ITERATION(1), <boost/mpl/aux_/advance_backward.hpp>))
#   include BOOST_PP_ITERATE()
#endif

        typedef BOOST_PP_CAT(iter,BOOST_PP_FRAME_ITERATION(1)) type;
    };

#if defined(BOOST_MPL_CFG_MSVC_60_ETI_BUG)
    /// ETI workaround
    template<> struct apply<int>
    {
        typedef int type;
    };
#endif
};

#undef i_

///// iteration, depth == 2

#elif BOOST_PP_ITERATION_DEPTH() == 2

#   define AUX778076_ITER_0 BOOST_PP_CAT(iter,BOOST_PP_DEC(BOOST_PP_FRAME_ITERATION(2)))
#   define AUX778076_ITER_1 BOOST_PP_CAT(iter,BOOST_PP_FRAME_ITERATION(2))

// each inner iteration adds one step: iterK = prior<iterK-1>::type
typedef typename prior<AUX778076_ITER_0>::type AUX778076_ITER_1;

#   undef AUX778076_ITER_1
#   undef AUX778076_ITER_0

#endif // BOOST_PP_ITERATION_DEPTH()
#endif // BOOST_PP_IS_ITERATING
| {
"pile_set_name": "Github"
} |
<UIView; frame = (0 0; 1112 834); autoresize = W+H; layer = <CALayer>>
| <UILabel; frame = (528.333 20; 55.6667 20.3333); text = 'What's'; userInteractionEnabled = NO; layer = <_UILabelLayer>>
| <UILabel; frame = (0 417; 25 20.3333); text = 'the'; userInteractionEnabled = NO; layer = <_UILabelLayer>>
| <UILabel; frame = (1073 417; 39 20.3333); text = 'point'; userInteractionEnabled = NO; layer = <_UILabelLayer>>
| <UILabel; frame = (552.333 816; 7.66667 18); text = '?'; userInteractionEnabled = NO; layer = <_UILabelLayer>> | {
"pile_set_name": "Github"
} |
package x509util
import (
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"crypto/x509/pkix"
"testing"
)
// TestCreateCertificateRequest round-trips a CSR carrying a SCEP
// challengePassword attribute: build, sign, re-parse, verify the signature
// and recover the challenge.
func TestCreateCertificateRequest(t *testing.T) {
	rng := rand.Reader

	// 1024-bit key keeps the test fast; it is not security-relevant here.
	key, err := rsa.GenerateKey(rng, 1024)
	if err != nil {
		t.Fatal(err)
	}

	template := CertificateRequest{
		CertificateRequest: x509.CertificateRequest{
			Subject: pkix.Name{
				CommonName: "test.acme.co",
				Country:    []string{"US"},
			},
		},
		ChallengePassword: "foobar",
	}

	csrDER, err := CreateCertificateRequest(rng, &template, key)
	if err != nil {
		t.Fatal(err)
	}

	parsed, err := x509.ParseCertificateRequest(csrDER)
	if err != nil {
		t.Fatalf("failed to create certificate request: %s", err)
	}
	if err := parsed.CheckSignature(); err != nil {
		t.Errorf("failed to check certificate request signature: %s", err)
	}

	challenge, err := ParseChallengePassword(csrDER)
	if err != nil {
		t.Fatalf("failed to parse challengePassword attribute: %s", err)
	}
	if have, want := challenge, template.ChallengePassword; have != want {
		t.Errorf("have %s, want %s", have, want)
	}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.impl
import com.intellij.ProjectTopics
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.service
import com.intellij.openapi.extensions.ExtensionNotApplicableException
import com.intellij.openapi.module.Module
import com.intellij.openapi.module.ModuleManager
import com.intellij.openapi.project.ModuleListener
import com.intellij.openapi.project.Project
import com.intellij.openapi.project.rootManager
import com.intellij.openapi.roots.ModuleRootEvent
import com.intellij.openapi.roots.ModuleRootListener
import com.intellij.openapi.startup.StartupActivity
import com.intellij.openapi.vcs.AbstractVcs
import com.intellij.openapi.vcs.ProjectLevelVcsManager
import com.intellij.openapi.vcs.VcsDirectoryMapping
import com.intellij.openapi.vfs.VirtualFile
/**
 * Detects which VCS manages the content roots of the project's modules and
 * keeps the project's VCS directory mappings in sync as modules and roots
 * change. Runs only when the user has not configured mappings manually
 * (see [ProjectLevelVcsManagerImpl.needAutodetectMappings]).
 */
internal class ModuleVcsDetector(private val project: Project) {
  // Resolved lazily; NONE is fine because access happens from listeners/startup only.
  private val vcsManager by lazy(LazyThreadSafetyMode.NONE) {
    (ProjectLevelVcsManager.getInstance(project) as ProjectLevelVcsManagerImpl)
  }

  /** Startup hook: wires the module/root listeners and runs the initial detection. */
  internal class MyPostStartUpActivity : StartupActivity.DumbAware {
    init {
      // Auto-detection is disabled entirely in unit-test mode.
      if (ApplicationManager.getApplication().isUnitTestMode) {
        throw ExtensionNotApplicableException.INSTANCE
      }
    }

    override fun runActivity(project: Project) {
      val vcsDetector = project.service<ModuleVcsDetector>()
      val listener = vcsDetector.MyModulesListener()
      val busConnection = project.messageBus.connect()
      busConnection.subscribe(ProjectTopics.MODULES, listener)
      busConnection.subscribe(ProjectTopics.PROJECT_ROOTS, listener)
      if (vcsDetector.vcsManager.needAutodetectMappings()) {
        vcsDetector.autoDetectVcsMappings(true)
      }
    }
  }

  /**
   * Tracks module add/remove and root changes. Mappings of removed modules are
   * collected between beforeRootsChange/rootsChanged and dropped unless the
   * module (and its mapping) reappears in the meantime (see [moduleAdded]).
   */
  private inner class MyModulesListener : ModuleRootListener, ModuleListener {
    // Mappings that belonged to modules removed during the current roots-change cycle.
    private val myMappingsForRemovedModules: MutableList<VcsDirectoryMapping> = mutableListOf()

    override fun beforeRootsChange(event: ModuleRootEvent) {
      myMappingsForRemovedModules.clear()
    }

    override fun rootsChanged(event: ModuleRootEvent) {
      myMappingsForRemovedModules.forEach { mapping -> vcsManager.removeDirectoryMapping(mapping) }
      // the check calculates to true only before user has done any change to mappings, i.e. in case modules are detected/added automatically
      // on start etc (look inside)
      if (vcsManager.needAutodetectMappings()) {
        autoDetectVcsMappings(false)
      }
    }

    override fun moduleAdded(project: Project, module: Module) {
      // If the "removed" module came back, keep its mappings instead of dropping them.
      myMappingsForRemovedModules.removeAll(getMappings(module))
      autoDetectModuleVcsMapping(module)
    }

    override fun beforeModuleRemoved(project: Project, module: Module) {
      myMappingsForRemovedModules.addAll(getMappings(module))
    }
  }

  /**
   * Scans all module content roots. If every root agrees on one VCS (or none),
   * collapses the configuration to a single <Project> mapping; otherwise, when
   * [tryMapPieces] is set, maps each detected root individually.
   */
  private fun autoDetectVcsMappings(tryMapPieces: Boolean) {
    if (vcsManager.haveDefaultMapping() != null) return

    val usedVcses = mutableSetOf<AbstractVcs?>()
    val detectedRoots = mutableSetOf<Pair<VirtualFile, AbstractVcs>>()

    val roots = ModuleManager.getInstance(project).modules.flatMap { it.rootManager.contentRoots.asIterable() }.distinct()
    for (root in roots) {
      val moduleVcs = vcsManager.findVersioningVcs(root)
      if (moduleVcs != null) {
        detectedRoots.add(Pair(root, moduleVcs))
      }
      usedVcses.add(moduleVcs) // put 'null' for unmapped module
    }

    // singleOrNull: exactly one distinct VCS across ALL roots (null entries count too)
    val commonVcs = usedVcses.singleOrNull()
    if (commonVcs != null) {
      // Remove existing mappings that will duplicate added <Project> mapping.
      val rootPaths = roots.map { it.path }.toSet()
      val additionalMappings = vcsManager.directoryMappings.filter { it.directory !in rootPaths }
      vcsManager.setAutoDirectoryMappings(additionalMappings + VcsDirectoryMapping.createDefault(commonVcs.name))
    }
    else if (tryMapPieces) {
      val newMappings = detectedRoots.map { (root, vcs) -> VcsDirectoryMapping(root.path, vcs.name) }
      vcsManager.setAutoDirectoryMappings(vcsManager.directoryMappings + newMappings)
    }
  }

  /** Adds per-root mappings for a single freshly added module (no <Project> collapse). */
  private fun autoDetectModuleVcsMapping(module: Module) {
    if (vcsManager.haveDefaultMapping() != null) return

    val newMappings = mutableListOf<VcsDirectoryMapping>()
    for (file in module.rootManager.contentRoots) {
      val vcs = vcsManager.findVersioningVcs(file)
      // Only map roots whose detected VCS differs from the currently effective one.
      if (vcs != null && vcs !== vcsManager.getVcsFor(file)) {
        newMappings.add(VcsDirectoryMapping(file.path, vcs.name))
      }
    }

    if (newMappings.isNotEmpty()) {
      vcsManager.setAutoDirectoryMappings(vcsManager.directoryMappings + newMappings)
    }
  }

  /** Existing directory mappings whose directory is one of [module]'s content roots. */
  private fun getMappings(module: Module): List<VcsDirectoryMapping> {
    return module.rootManager.contentRoots
      .mapNotNull { root -> vcsManager.directoryMappings.firstOrNull { it.directory == root.path } }
  }
}
| {
"pile_set_name": "Github"
} |
package io.gitlab.arturbosch.detekt.generator.collection
import io.gitlab.arturbosch.detekt.api.DetektVisitor
import io.gitlab.arturbosch.detekt.generator.collection.exception.InvalidDocumentationException
import io.gitlab.arturbosch.detekt.rules.isOverride
import org.jetbrains.kotlin.psi.KtCallExpression
import org.jetbrains.kotlin.psi.KtClassOrObject
import org.jetbrains.kotlin.psi.KtFile
import org.jetbrains.kotlin.psi.KtProperty
import org.jetbrains.kotlin.psi.KtReferenceExpression
import org.jetbrains.kotlin.psi.KtSuperTypeList
import org.jetbrains.kotlin.psi.KtValueArgumentList
import org.jetbrains.kotlin.psi.psiUtil.containingClass
import org.jetbrains.kotlin.psi.psiUtil.referenceExpression
/**
 * A detekt multi-rule: a named container listing the rules it activates.
 */
data class MultiRule(
    val name: String,
    val rules: List<String> = emptyList()
) {
    /** True when [ruleName] is one of this multi-rule's rules (enables `in`). */
    operator fun contains(ruleName: String): Boolean = rules.contains(ruleName)
}
// Simple class name of the detekt MultiRule base class; used below to recognise
// classes whose supertype list names it.
private val multiRule = io.gitlab.arturbosch.detekt.api.MultiRule::class.simpleName ?: ""
/** Collects one [MultiRule] per Kotlin file that declares a MultiRule subclass. */
class MultiRuleCollector : Collector<MultiRule> {
    override val items = mutableListOf<MultiRule>()

    override fun visit(file: KtFile) {
        val multiRuleVisitor = MultiRuleVisitor()
        file.accept(multiRuleVisitor)
        if (!multiRuleVisitor.containsMultiRule) return
        items += multiRuleVisitor.getMultiRule()
    }
}
/**
 * PSI visitor that finds a MultiRule subclass in a file and gathers the rule
 * names it activates, both from direct constructor calls and from properties
 * referenced inside its overridden `rules` list.
 */
class MultiRuleVisitor : DetektVisitor() {

    /** True once at least one visited class was identified as a MultiRule subclass. */
    val containsMultiRule
        get() = isMultiRuleByClass.values.any { it }

    private val isMultiRuleByClass = mutableMapOf<String, Boolean>()
    private var multiRuleName = ""
    private val ruleListVisitor = RuleListVisitor()
    // property name -> name of the rule its initializer references
    private val propertyInitializers = mutableMapOf<String, String>()

    /**
     * Builds the collected [MultiRule].
     * @throws InvalidDocumentationException when no name or no rules were found
     */
    fun getMultiRule(): MultiRule {
        // Resolve referenced properties to their initializers, then append direct names.
        val resolvedProperties = ruleListVisitor.ruleProperties.mapNotNull { propertyInitializers[it] }
        val rules = resolvedProperties + ruleListVisitor.ruleNames

        if (multiRuleName.isEmpty()) {
            throw InvalidDocumentationException("MultiRule without name found.")
        }
        if (rules.isEmpty()) {
            throw InvalidDocumentationException("MultiRule $multiRuleName contains no rules.")
        }
        return MultiRule(multiRuleName, rules)
    }

    override fun visitSuperTypeList(list: KtSuperTypeList) {
        // A class is a multi-rule when any supertype's referenced name is "MultiRule".
        val extendsMultiRule = list.entries
            ?.mapNotNull { it.typeAsUserType?.referencedName }
            ?.any { it == multiRule } ?: false

        val owner = list.containingClass()
        val ownerName = owner?.name
        if (owner != null && ownerName != null && ownerName !in isMultiRuleByClass) {
            isMultiRuleByClass[ownerName] = extendsMultiRule
        }
        super.visitSuperTypeList(list)
    }

    override fun visitClassOrObject(classOrObject: KtClassOrObject) {
        super.visitClassOrObject(classOrObject)
        if (isMultiRuleByClass[classOrObject.name] == true) {
            multiRuleName = classOrObject.name?.trim() ?: ""
        }
    }

    override fun visitProperty(property: KtProperty) {
        super.visitProperty(property)
        // Only properties declared inside the multi-rule class are of interest.
        if (isMultiRuleByClass[property.containingClass()?.name] != true) return

        if (property.isOverride() && property.name == "rules") {
            // The overridden rule list itself: harvest names and references.
            property.accept(ruleListVisitor)
        } else {
            // Any other property: remember what its initializer references so a
            // later `rules` entry can be resolved through it.
            val propertyName = property.name
            val initializerText = property.initializer?.referenceExpression()?.text
            if (propertyName != null && initializerText != null) {
                propertyInitializers[propertyName] = initializerText
            }
        }
    }
}
/** Visits a rule-list argument list, splitting constructor calls from property references. */
class RuleListVisitor : DetektVisitor() {

    /** Rule names constructed directly (call expressions). */
    var ruleNames: MutableSet<String> = mutableSetOf()
        private set

    /** Names of properties referenced instead of constructed (resolved later). */
    var ruleProperties: MutableSet<String> = mutableSetOf()
        private set

    override fun visitValueArgumentList(list: KtValueArgumentList) {
        super.visitValueArgumentList(list)
        for (argument in list.arguments) {
            val expression = argument.getArgumentExpression()
            // Call Expression = Constructor of rule
            if (expression is KtCallExpression) {
                ruleNames.add(expression.calleeExpression?.text ?: "")
            }
            // Reference Expression = variable we need to search for
            if (expression is KtReferenceExpression) {
                ruleProperties.add(expression.text ?: "")
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
/**
* ScriptDev2 is an extension for mangos providing enhanced features for
* area triggers, creatures, game objects, instances, items, and spells beyond
* the default database scripting in mangos.
*
* Copyright (C) 2006-2013 ScriptDev2 <http://www.scriptdev2.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* World of Warcraft, and all World of Warcraft or Warcraft art, images,
* and lore are copyrighted by Blizzard Entertainment, Inc.
*/
/**
* ScriptData
* SDName: bug_trio
* SD%Complete: 75
* SDComment: Summon Player spell NYI; Poison Cloud damage spell NYI; Timers need adjustments
* SDCategory: Temple of Ahn'Qiraj
* EndScriptData
*/
#include "precompiled.h"
#include "temple_of_ahnqiraj.h"
// Spell and creature entries used by the three bug-trio bosses below.
enum
{
    // kri
    SPELL_CLEAVE            = 26350,    // cast on the current victim
    SPELL_TOXIC_VOLLEY      = 25812,    // cast on self (raid-wide poison)
    SPELL_SUMMON_CLOUD      = 26590,    // summons 15933 (cast on Kri's death)

    // vem
    SPELL_CHARGE            = 26561,    // cast on a random attacker
    SPELL_VENGEANCE         = 25790,    // enrages the others on Vem's death
    SPELL_KNOCKBACK         = 26027,    // also wipes 80% of the victim's threat

    // yauj
    SPELL_HEAL              = 25807,    // cast on the lowest-HP friendly unit
    SPELL_FEAR              = 26580,    // followed by a full threat reset

    NPC_YAUJ_BROOD          = 15621     // adds spawned when Yauj dies
};
// Kri: cleaves the tank, poisons the raid and leaves a poison cloud on death.
struct MANGOS_DLL_DECL boss_kriAI : public ScriptedAI
{
    ScriptedInstance* m_pInstance;

    uint32 m_cleaveTimer;       // ms until the next SPELL_CLEAVE
    uint32 m_toxicVolleyTimer;  // ms until the next SPELL_TOXIC_VOLLEY

    boss_kriAI(Creature* pCreature) : ScriptedAI(pCreature)
    {
        m_pInstance = (ScriptedInstance*)pCreature->GetInstanceData();
        Reset();
    }

    void Reset() override
    {
        m_cleaveTimer = urand(4000, 8000);
        m_toxicVolleyTimer = urand(6000, 12000);
    }

    void JustDied(Unit* /*pKiller*/) override
    {
        // poison cloud on death
        DoCastSpellIfCan(m_creature, SPELL_SUMMON_CLOUD, CAST_TRIGGERED);

        if (!m_pInstance)
            return;

        // While the other two bugs still live, the corpse must not be lootable yet
        if (m_pInstance->GetData(TYPE_BUG_TRIO) != DONE)
        {
            m_creature->RemoveFlag(UNIT_DYNAMIC_FLAGS, UNIT_DYNFLAG_LOOTABLE);
            m_pInstance->SetData(TYPE_BUG_TRIO, SPECIAL);
        }
    }

    void JustReachedHome() override
    {
        if (m_pInstance)
            m_pInstance->SetData(TYPE_BUG_TRIO, FAIL);
    }

    void UpdateAI(const uint32 uiDiff) override
    {
        // Nothing to do without a valid combat target
        if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
            return;

        // Cleave on the current victim
        if (m_cleaveTimer < uiDiff)
        {
            if (DoCastSpellIfCan(m_creature->getVictim(), SPELL_CLEAVE) == CAST_OK)
                m_cleaveTimer = urand(5000, 12000);
        }
        else
            m_cleaveTimer -= uiDiff;

        // Raid-wide toxic volley
        if (m_toxicVolleyTimer < uiDiff)
        {
            if (DoCastSpellIfCan(m_creature, SPELL_TOXIC_VOLLEY) == CAST_OK)
                m_toxicVolleyTimer = urand(10000, 15000);
        }
        else
            m_toxicVolleyTimer -= uiDiff;

        DoMeleeAttackIfReady();
    }
};
// Vem: charges random raid members and knocks back the tank (dumping threat);
// enrages the other two bugs when he dies.
struct MANGOS_DLL_DECL boss_vemAI : public ScriptedAI
{
    boss_vemAI(Creature* pCreature) : ScriptedAI(pCreature)
    {
        m_pInstance = (ScriptedInstance*)pCreature->GetInstanceData();
        Reset();
    }

    ScriptedInstance* m_pInstance;

    // Ability cooldown trackers (milliseconds)
    uint32 m_uiChargeTimer;
    uint32 m_uiKnockBackTimer;

    void Reset() override
    {
        m_uiChargeTimer = urand(15000, 27000);
        m_uiKnockBackTimer = urand(8000, 20000);
    }

    void JustDied(Unit* /*pKiller*/) override
    {
        // Enrage the other bugs
        DoCastSpellIfCan(m_creature, SPELL_VENGEANCE, CAST_TRIGGERED);

        if (!m_pInstance)
        {
            return;
        }

        // If the other 2 bugs are still alive, make unlootable
        if (m_pInstance->GetData(TYPE_BUG_TRIO) != DONE)
        {
            m_creature->RemoveFlag(UNIT_DYNAMIC_FLAGS, UNIT_DYNFLAG_LOOTABLE);
            m_pInstance->SetData(TYPE_BUG_TRIO, SPECIAL);
        }
    }

    void JustReachedHome() override
    {
        // Leaving combat resets the whole trio encounter
        if (m_pInstance)
        {
            m_pInstance->SetData(TYPE_BUG_TRIO, FAIL);
        }
    }

    void UpdateAI(const uint32 uiDiff) override
    {
        // Return since we have no target
        if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
        {
            return;
        }

        // Charge_Timer: charge a random attacker
        if (m_uiChargeTimer < uiDiff)
        {
            if (Unit* pTarget = m_creature->SelectAttackingTarget(ATTACKING_TARGET_RANDOM, 0))
            {
                if (DoCastSpellIfCan(pTarget, SPELL_CHARGE) == CAST_OK)
                {
                    m_uiChargeTimer = urand(8000, 16000);
                }
            }
        }
        else
        { m_uiChargeTimer -= uiDiff; }

        // KnockBack_Timer: on a successful cast also wipe 80% of the current
        // victim's threat so another target is likely picked afterwards
        if (m_uiKnockBackTimer < uiDiff)
        {
            if (DoCastSpellIfCan(m_creature, SPELL_KNOCKBACK) == CAST_OK)
            {
                if (m_creature->GetThreatManager().getThreat(m_creature->getVictim()))
                {
                    m_creature->GetThreatManager().modifyThreatPercent(m_creature->getVictim(), -80);
                }
                m_uiKnockBackTimer = urand(15000, 25000);
            }
        }
        else
        { m_uiKnockBackTimer -= uiDiff; }

        DoMeleeAttackIfReady();
    }
};
// Yauj: healer of the trio; fears the raid (with a threat reset) and spawns
// ten brood adds when she dies.
struct MANGOS_DLL_DECL boss_yaujAI : public ScriptedAI
{
    boss_yaujAI(Creature* pCreature) : ScriptedAI(pCreature)
    {
        m_pInstance = (ScriptedInstance*)pCreature->GetInstanceData();
        Reset();
    }

    ScriptedInstance* m_pInstance;

    // Ability cooldown trackers (milliseconds)
    uint32 m_uiHealTimer;
    uint32 m_uiFearTimer;

    void Reset() override
    {
        m_uiHealTimer = urand(25000, 40000);
        m_uiFearTimer = urand(12000, 24000);
    }

    void JustDied(Unit* /*Killer*/) override
    {
        // Spawn 10 yauj brood on death
        float fX, fY, fZ;
        for (int i = 0; i < 10; ++i)
        {
            // Random point within 10 yd of the corpse for each add
            m_creature->GetRandomPoint(m_creature->GetPositionX(), m_creature->GetPositionY(), m_creature->GetPositionZ(), 10.0f, fX, fY, fZ);
            m_creature->SummonCreature(NPC_YAUJ_BROOD, fX, fY, fZ, 0.0f, TEMPSUMMON_TIMED_OOC_DESPAWN, 30000);
        }

        if (!m_pInstance)
        {
            return;
        }

        // If the other 2 bugs are still alive, make unlootable
        if (m_pInstance->GetData(TYPE_BUG_TRIO) != DONE)
        {
            m_creature->RemoveFlag(UNIT_DYNAMIC_FLAGS, UNIT_DYNFLAG_LOOTABLE);
            m_pInstance->SetData(TYPE_BUG_TRIO, SPECIAL);
        }
    }

    void JustReachedHome() override
    {
        // Leaving combat resets the whole trio encounter
        if (m_pInstance)
        {
            m_pInstance->SetData(TYPE_BUG_TRIO, FAIL);
        }
    }

    void UpdateAI(const uint32 uiDiff) override
    {
        // Return since we have no target
        if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
        {
            return;
        }

        // Fear_Timer: fear is followed by a full threat reset
        if (m_uiFearTimer < uiDiff)
        {
            if (DoCastSpellIfCan(m_creature, SPELL_FEAR) == CAST_OK)
            {
                DoResetThreat();
                m_uiFearTimer = 20000;
            }
        }
        else
        { m_uiFearTimer -= uiDiff; }

        // Heal the most wounded friendly unit in range
        if (m_uiHealTimer < uiDiff)
        {
            if (Unit* pTarget = DoSelectLowestHpFriendly(100.0f))
            {
                if (DoCastSpellIfCan(pTarget, SPELL_HEAL) == CAST_OK)
                {
                    m_uiHealTimer = urand(15000, 30000);
                }
            }
        }
        else
        { m_uiHealTimer -= uiDiff; }

        DoMeleeAttackIfReady();
    }
};
// Factory used by the script registry to attach the Yauj AI to her creature.
CreatureAI* GetAI_boss_yauj(Creature* pCreature)
{
    return new boss_yaujAI(pCreature);
}
// Factory used by the script registry to attach the Vem AI to his creature.
CreatureAI* GetAI_boss_vem(Creature* pCreature)
{
    return new boss_vemAI(pCreature);
}
// Factory used by the script registry to attach the Kri AI to her creature.
CreatureAI* GetAI_boss_kri(Creature* pCreature)
{
    return new boss_kriAI(pCreature);
}
void AddSC_bug_trio()
{
Script* pNewScript;
pNewScript = new Script;
pNewScript->Name = "boss_kri";
pNewScript->GetAI = &GetAI_boss_kri;
pNewScript->RegisterSelf();
pNewScript = new Script;
pNewScript->Name = "boss_vem";
pNewScript->GetAI = &GetAI_boss_vem;
pNewScript->RegisterSelf();
pNewScript = new Script;
pNewScript->Name = "boss_yauj";
pNewScript->GetAI = &GetAI_boss_yauj;
pNewScript->RegisterSelf();
}
| {
"pile_set_name": "Github"
} |
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1024m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. For more details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
| {
"pile_set_name": "Github"
} |
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Directory listing — management-core 3.0.4</title>
</head>
<body>
<h1>Directory listing</h1>
<hr>
<pre>
<a href="management-core-3.0.4-javadoc.jar">management-core-3.0.4-javadoc.jar</a>
<a href="management-core-3.0.4-javadoc.jar.md5">management-core-3.0.4-javadoc.jar.md5</a>
<a href="management-core-3.0.4-javadoc.jar.sha1">management-core-3.0.4-javadoc.jar.sha1</a>
<a href="management-core-3.0.4-sources.jar">management-core-3.0.4-sources.jar</a>
<a href="management-core-3.0.4-sources.jar.md5">management-core-3.0.4-sources.jar.md5</a>
<a href="management-core-3.0.4-sources.jar.sha1">management-core-3.0.4-sources.jar.sha1</a>
<a href="management-core-3.0.4.jar">management-core-3.0.4.jar</a>
<a href="management-core-3.0.4.jar.md5">management-core-3.0.4.jar.md5</a>
<a href="management-core-3.0.4.jar.sha1">management-core-3.0.4.jar.sha1</a>
<a href="management-core-3.0.4.pom">management-core-3.0.4.pom</a>
<a href="management-core-3.0.4.pom.md5">management-core-3.0.4.pom.md5</a>
<a href="management-core-3.0.4.pom.sha1">management-core-3.0.4.pom.sha1</a>
</pre>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package tk.woppo.sunday.model;
import android.database.Cursor;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
import java.util.HashMap;
import tk.woppo.sunday.dao.WeatherDataHelper;
import tk.woppo.sunday.dao.WeatherTodayDataHelper;
/**
* Created by Ho on 2014/7/4.
*/
/**
 * Gson-mapped model for "today's weather" of one city, with a process-wide
 * in-memory cache keyed by city ID.
 */
public class WeatherTodayModel extends BaseModel {
// Process-wide cache of models, keyed by city ID. Not synchronized —
// presumably only touched from the UI thread; confirm before reusing elsewhere.
private static final HashMap<String, WeatherTodayModel> CACHE = new HashMap<String, WeatherTodayModel>();
/** City ID. */
@SerializedName("cityid")
public String id;
/** City name. */
@SerializedName("city")
public String cityName;
/** Temperature. */
public String temp;
/** Weather description. */
public String weather;
/** Wind direction. */
@SerializedName("WD")
public String wind;
/** Wind force. */
@SerializedName("WS")
public String ws;
/** Humidity. */
@SerializedName("SD")
public String sd;
/** Publication time of this report. */
public String time;
// Inserts/overwrites the cache entry for the model's own city ID.
private static void addToCache(WeatherTodayModel model) {
CACHE.put(model.id, model);
}
// Returns the cached model for the given city ID, or null if absent.
private static WeatherTodayModel getFromCache(String id) {
return CACHE.get(id);
}
/** Deserializes a model from its JSON representation (no caching). */
public static WeatherTodayModel fromJson(String json) {
return new Gson().fromJson(json, WeatherTodayModel.class);
}
/**
 * Reads a model from a DB cursor row, serving from the cache when possible.
 * The cache key comes from the row's ID column; the model itself is rebuilt
 * from the row's stored JSON and then cached under model.id (assumed to
 * match the row ID — confirm against the writer of that table).
 */
public static WeatherTodayModel fromCursor(Cursor cursor) {
String id = cursor.getString(cursor.getColumnIndex(WeatherDataHelper.WeatherDBInfo.ID));
WeatherTodayModel model = getFromCache(id);
if (model != null) {
return model;
}
model = new Gson().fromJson(cursor.getString(cursor.getColumnIndex(WeatherTodayDataHelper.WeatherTodayDBInfo.JSON)), WeatherTodayModel.class);
addToCache(model);
return model;
}
/** Envelope matching the remote API's response wrapper. */
public static class WeatherTodayRequestData {
public WeatherTodayModel weatherinfo;
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#import "SimpleDBMissingParameterException.h"
// Service exception raised when a required SimpleDB request parameter is
// missing. Adds the SimpleDB-specific "BoxUsage" value parsed from the raw
// service error. Uses manual reference counting (pre-ARC AWS SDK).
@implementation SimpleDBMissingParameterException

@synthesize boxUsage;

-(id)initWithMessage:(NSString *)theMessage
{
    if (self = [super initWithMessage:theMessage]) {
    }
    return self;
}

// Copies fields from a generic AmazonServiceException, extracting the
// optional "BoxUsage" additional field into an NSNumber.
-(void)setPropertiesWithException:(AmazonServiceException *)theException
{
    [super setPropertiesWithException:theException];
    if ([theException.additionalFields valueForKey:@"BoxUsage"] != nil) {
        self.boxUsage = [AmazonSDKUtil convertStringToNumber:[theException.additionalFields valueForKey:@"BoxUsage"]];
    }
}

// Debug description: "{BoxUsage: …,<superclass description>}".
// Returned string is autoreleased.
-(NSString *)description
{
    NSMutableString *buffer = [[NSMutableString alloc] initWithCapacity:256];
    [buffer appendString:@"{"];
    [buffer appendString:[[[NSString alloc] initWithFormat:@"BoxUsage: %@,", boxUsage] autorelease]];
    [buffer appendString:[super description]];
    [buffer appendString:@"}"];
    return [buffer autorelease];
}

// MRC: release the retained boxUsage before tearing down.
-(void)dealloc
{
    [boxUsage release];
    [super dealloc];
}

@end
| {
"pile_set_name": "Github"
} |
<?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the LGPL. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace Doctrine\ORM\Internal\Hydration;
use Doctrine\DBAL\Connection;
/**
* Hydrator that produces flat, rectangular results of scalar data.
* The created result is almost the same as a regular SQL result set, except
* that column names are mapped to field names and data type conversions take place.
*
* @author Roman Borschel <roman@code-factory.org>
* @since 2.0
*/
class ScalarHydrator extends AbstractHydrator
{
    /** @override */
    protected function _hydrateAll()
    {
        $rows  = array();
        $cache = array();

        // Drain the statement row by row; each raw DB row is mapped to a
        // scalar row (column aliases resolved, values type-converted).
        while ($rowData = $this->_stmt->fetch(\PDO::FETCH_ASSOC)) {
            $rows[] = $this->_gatherScalarRowData($rowData, $cache);
        }

        return $rows;
    }

    /** @override */
    protected function _hydrateRow(array $data, array &$cache, array &$result)
    {
        // Single-row variant: append one converted scalar row to $result.
        $result[] = $this->_gatherScalarRowData($data, $cache);
    }
}
"pile_set_name": "Github"
} |
package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// NOTE(review): this file is generated by AutoRest ("Changes may cause
// incorrect behavior and will be lost if the code is regenerated") — prefer
// regenerating over hand edits.
// VpnSitesClient is the network Client
type VpnSitesClient struct {
BaseClient
}

// NewVpnSitesClient creates an instance of the VpnSitesClient client.
func NewVpnSitesClient(subscriptionID string) VpnSitesClient {
return NewVpnSitesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewVpnSitesClientWithBaseURI creates an instance of the VpnSitesClient client.
func NewVpnSitesClientWithBaseURI(baseURI string, subscriptionID string) VpnSitesClient {
return VpnSitesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// NOTE(review): generated long-running-operation (LRO) group — CreateOrUpdate
// returns a future built from the initial response; completion is observed by
// waiting on the future.
// CreateOrUpdate creates a VpnSite resource if it doesn't exist else updates the existing VpnSite.
// Parameters:
// resourceGroupName - the resource group name of the VpnSite.
// vpnSiteName - the name of the VpnSite being created or updated.
// vpnSiteParameters - parameters supplied to create or update VpnSite.
func (client VpnSitesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, vpnSiteName string, vpnSiteParameters VpnSite) (result VpnSitesCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.CreateOrUpdate")
defer func() {
// sc stays -1 when no HTTP response was ever received.
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, vpnSiteName, vpnSiteParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}

// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client VpnSitesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, vpnSiteName string, vpnSiteParameters VpnSite) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"vpnSiteName": autorest.Encode("path", vpnSiteName),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}", pathParameters),
autorest.WithJSON(vpnSiteParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) CreateOrUpdateSender(req *http.Request) (future VpnSitesCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
// The future wraps the initial response; polling state derives from it.
future.Future, err = azure.NewFutureFromResponse(resp)
return
}

// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) CreateOrUpdateResponder(resp *http.Response) (result VpnSite, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// NOTE(review): generated LRO group for deletion; same Preparer/Sender/Responder
// pattern as CreateOrUpdate, with 200/202/204 all accepted as success.
// Delete deletes a VpnSite.
// Parameters:
// resourceGroupName - the resource group name of the VpnSite.
// vpnSiteName - the name of the VpnSite being deleted.
func (client VpnSitesClient) Delete(ctx context.Context, resourceGroupName string, vpnSiteName string) (result VpnSitesDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, vpnSiteName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}

// DeletePreparer prepares the Delete request.
func (client VpnSitesClient) DeletePreparer(ctx context.Context, resourceGroupName string, vpnSiteName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"vpnSiteName": autorest.Encode("path", vpnSiteName),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) DeleteSender(req *http.Request) (future VpnSitesDeleteFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// NOTE(review): generated synchronous GET group — unlike CreateOrUpdate/Delete,
// Get runs Preparer → Sender → Responder inline and returns the VpnSite directly.
// Get retrieves the details of a VPNsite.
// Parameters:
// resourceGroupName - the resource group name of the VpnSite.
// vpnSiteName - the name of the VpnSite being retrieved.
func (client VpnSitesClient) Get(ctx context.Context, resourceGroupName string, vpnSiteName string) (result VpnSite, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, vpnSiteName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "Get", resp, "Failure responding to request")
}
return
}

// GetPreparer prepares the Get request.
func (client VpnSitesClient) GetPreparer(ctx context.Context, resourceGroupName string, vpnSiteName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"vpnSiteName": autorest.Encode("path", vpnSiteName),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) GetResponder(resp *http.Response) (result VpnSite, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// NOTE(review): generated paged-list group — List returns the first page with a
// `fn` hook (listNextResults) for fetching subsequent pages; ListComplete wraps
// it in an iterator that crosses page boundaries automatically.
// List lists all the VpnSites in a subscription.
func (client VpnSitesClient) List(ctx context.Context) (result ListVpnSitesResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.List")
defer func() {
sc := -1
if result.lvsr.Response.Response != nil {
sc = result.lvsr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.lvsr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "List", resp, "Failure sending request")
return
}
result.lvsr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "List", resp, "Failure responding to request")
}
return
}

// ListPreparer prepares the List request.
func (client VpnSitesClient) ListPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/vpnSites", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}

// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) ListResponder(resp *http.Response) (result ListVpnSitesResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}

// listNextResults retrieves the next set of results, if any.
func (client VpnSitesClient) listNextResults(ctx context.Context, lastResults ListVpnSitesResult) (result ListVpnSitesResult, err error) {
req, err := lastResults.listVpnSitesResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.VpnSitesClient", "listNextResults", nil, "Failure preparing next results request")
}
// nil request with nil error means no next-page link: iteration is done.
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.VpnSitesClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}

// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client VpnSitesClient) ListComplete(ctx context.Context) (result ListVpnSitesResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx)
return
}
// NOTE(review): generated paged-list group scoped to a resource group; mirrors
// the subscription-wide List group above.
// ListByResourceGroup lists all the vpnSites in a resource group.
// Parameters:
// resourceGroupName - the resource group name of the VpnSite.
func (client VpnSitesClient) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ListVpnSitesResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.ListByResourceGroup")
defer func() {
sc := -1
if result.lvsr.Response.Response != nil {
sc = result.lvsr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listByResourceGroupNextResults
req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "ListByResourceGroup", nil, "Failure preparing request")
return
}
resp, err := client.ListByResourceGroupSender(req)
if err != nil {
result.lvsr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "ListByResourceGroup", resp, "Failure sending request")
return
}
result.lvsr, err = client.ListByResourceGroupResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "ListByResourceGroup", resp, "Failure responding to request")
}
return
}

// ListByResourceGroupPreparer prepares the ListByResourceGroup request.
func (client VpnSitesClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}

// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) ListByResourceGroupResponder(resp *http.Response) (result ListVpnSitesResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}

// listByResourceGroupNextResults retrieves the next set of results, if any.
func (client VpnSitesClient) listByResourceGroupNextResults(ctx context.Context, lastResults ListVpnSitesResult) (result ListVpnSitesResult, err error) {
req, err := lastResults.listVpnSitesResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.VpnSitesClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request")
}
// nil request with nil error means no next-page link: iteration is done.
if req == nil {
return
}
resp, err := client.ListByResourceGroupSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.VpnSitesClient", "listByResourceGroupNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByResourceGroupResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request")
}
return
}

// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required.
func (client VpnSitesClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result ListVpnSitesResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.ListByResourceGroup")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListByResourceGroup(ctx, resourceGroupName)
return
}
// NOTE(review): generated LRO group — PATCH of the resource's tags only; the
// body is a TagsObject rather than a full VpnSite.
// UpdateTags updates VpnSite tags.
// Parameters:
// resourceGroupName - the resource group name of the VpnSite.
// vpnSiteName - the name of the VpnSite being updated.
// vpnSiteParameters - parameters supplied to update VpnSite tags.
func (client VpnSitesClient) UpdateTags(ctx context.Context, resourceGroupName string, vpnSiteName string, vpnSiteParameters TagsObject) (result VpnSitesUpdateTagsFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/VpnSitesClient.UpdateTags")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.UpdateTagsPreparer(ctx, resourceGroupName, vpnSiteName, vpnSiteParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "UpdateTags", nil, "Failure preparing request")
return
}
result, err = client.UpdateTagsSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VpnSitesClient", "UpdateTags", result.Response(), "Failure sending request")
return
}
return
}

// UpdateTagsPreparer prepares the UpdateTags request.
func (client VpnSitesClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, vpnSiteName string, vpnSiteParameters TagsObject) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"vpnSiteName": autorest.Encode("path", vpnSiteName),
}
const APIVersion = "2018-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}", pathParameters),
autorest.WithJSON(vpnSiteParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// UpdateTagsSender sends the UpdateTags request. The method will close the
// http.Response Body if it receives an error.
func (client VpnSitesClient) UpdateTagsSender(req *http.Request) (future VpnSitesUpdateTagsFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}

// UpdateTagsResponder handles the response to the UpdateTags request. The method always
// closes the http.Response Body.
func (client VpnSitesClient) UpdateTagsResponder(resp *http.Response) (result VpnSite, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
| {
"pile_set_name": "Github"
} |
'use strict';
// Angular $locale bundle for "lu-cd" (Luba-Katanga, DR Congo).
// NOTE(review): the date/number tables look CLDR-generated — confirm before
// hand-editing; string data below must be kept byte-identical.
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
// Number of digits after the decimal point in n's string form (0 if none).
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
// CLDR plural operands: v = visible fraction digit count (capped at 3 unless
// an explicit precision is given), f = visible fraction digits as an integer.
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"Dinda",
"Dilolo"
],
"DAY": [
"Lumingu",
"Nkodya",
"Nd\u00e0ay\u00e0",
"Ndang\u00f9",
"Nj\u00f2wa",
"Ng\u00f2vya",
"Lubingu"
],
"MONTH": [
"Ciongo",
"L\u00f9ishi",
"Lus\u00f2lo",
"M\u00f9uy\u00e0",
"Lum\u00f9ng\u00f9l\u00f9",
"Lufuimi",
"Kab\u00e0l\u00e0sh\u00ecp\u00f9",
"L\u00f9sh\u00eck\u00e0",
"Lutongolo",
"Lung\u00f9di",
"Kasw\u00e8k\u00e8s\u00e8",
"Cisw\u00e0"
],
"SHORTDAY": [
"Lum",
"Nko",
"Ndy",
"Ndg",
"Njw",
"Ngv",
"Lub"
],
"SHORTMONTH": [
"Cio",
"Lui",
"Lus",
"Muu",
"Lum",
"Luf",
"Kab",
"Lush",
"Lut",
"Lun",
"Kas",
"Cis"
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "d/M/y HH:mm",
"shortDate": "d/M/y",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "FrCD",
"DECIMAL_SEP": ",",
"GROUP_SEP": ".",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a4",
"posPre": "",
"posSuf": "\u00a4"
}
]
},
"id": "lu-cd",
// "one" applies to integers equal to 1 with no visible fraction digits;
// everything else is "other".
"pluralCat": function (n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
"pile_set_name": "Github"
} |
package com.android.inputmethodcommon;
// NOTE(review): these are skeleton stubs of the android.inputmethodcommon
// classes — no method bodies, and every member type is collapsed to `int`.
// This looks machine-generated (e.g. for dependency/class-graph analysis);
// field names mirror the real classes, the `int` types are placeholders.
class InputMethodSettingsInterface {
}
class InputMethodSettingsImpl {
int mContext;
int mImi;
int mImm;
int mSubtypeEnablerIcon;
int mSubtypeEnablerIconRes;
int mSubtypeEnablerTitle;
int mSubtypeEnablerTitleRes;
int mInputMethodSettingsCategoryTitle;
int mInputMethodSettingsCategoryTitleRes;
int mSubtypeEnablerPreference;
}
class InputMethodSettingsFragment {
int mSettings;
}
class InputMethodSettingsActivity {
int mSettings;
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Android list-row layout, presumably for a song item (ids: song_item_img /
     song_item_name / song_item_artist / song_item_menu): cover image on the
     left, title over artist in the middle, overflow-menu icon on the right. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/appBackground"
android:foreground="?android:attr/selectableItemBackground"
android:gravity="center_vertical"
android:orientation="horizontal"
android:paddingBottom="15dp"
android:paddingLeft="10dp"
android:paddingRight="10dp"
android:paddingTop="15dp">
<!-- Cover art thumbnail. -->
<ImageView
android:id="@+id/song_item_img"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_weight="0" />
<!-- Title/artist column; weight=1 so it absorbs all remaining width. -->
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="15dp"
android:layout_weight="1"
android:orientation="vertical">
<TextView
android:id="@+id/song_item_name"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:singleLine="true"
android:textColor="#000"
android:textSize="16sp" />
<TextView
android:id="@+id/song_item_artist"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:singleLine="true"
android:textColor="#989898"
android:textSize="14sp" />
</LinearLayout>
<!-- Overflow ("more") menu button with unbounded ripple feedback. -->
<ImageView
android:id="@+id/song_item_menu"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="5dp"
android:layout_weight="0"
android:background="@drawable/unbounded_ripple"
android:foregroundTint="#434343"
android:padding="5dp"
android:src="@drawable/abc_ic_menu_moreoverflow_mtrl_alpha"
android:theme="@style/Theme.AppCompat.Light" />
</LinearLayout>
"pile_set_name": "Github"
} |
/*#######################################################
* Copyright (c) 2014 Jeff Martin
* Copyright (c) 2015 Pedro Lafuente
* Copyright (c) 2017-2019 Gregor Santner
*
* Licensed under the MIT license.
* You can get a copy of the license text here:
* https://opensource.org/licenses/MIT
###########################################################*/
package other.writeily.ui;
import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import android.text.TextUtils;
import net.gsantner.markor.R;
import net.gsantner.markor.util.AppSettings;
import java.io.Serializable;
/**
 * Generic OK/Cancel confirmation dialog. Create via {@link #newInstance}, which
 * stores title, message and an arbitrary {@link Serializable} payload in the
 * fragment arguments; the payload is handed back to every registered
 * {@link ConfirmDialogCallback} together with the user's answer.
 */
public class WrConfirmDialog extends DialogFragment {
    public static final String FRAGMENT_TAG = "WrConfirmDialog";

    private static final String EXTRA_TITLE = "EXTRA_TITLE";
    private static final String EXTRA_MESSAGE = "EXTRA_MESSAGE";
    public static final String EXTRA_DATA = "EXTRA_DATA";

    // Payload passed back to the callbacks; restored from the arguments bundle.
    private Serializable _data;
    // NOTE(review): callbacks live in a plain field rather than the arguments
    // bundle, so they are lost if the system recreates the fragment — confirm
    // callers re-attach callbacks after configuration changes.
    private ConfirmDialogCallback[] _callbacks;
    private String _summary; // currently unused

    public static WrConfirmDialog newInstance(String title, String message,
                                              Serializable data, ConfirmDialogCallback... callbacks) {
        WrConfirmDialog confirmDialog = new WrConfirmDialog();
        Bundle args = new Bundle();
        args.putSerializable(EXTRA_DATA, data);
        args.putString(EXTRA_TITLE, title);
        args.putString(EXTRA_MESSAGE, message);
        confirmDialog.setArguments(args);
        confirmDialog.setCallbacks(callbacks);
        return confirmDialog;
    }

    public void setCallbacks(ConfirmDialogCallback[] callbacks) {
        _callbacks = callbacks;
    }

    @Override
    @NonNull
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        String title = getArguments().getString(EXTRA_TITLE);
        String message = getArguments().getString(EXTRA_MESSAGE);
        _data = getArguments().getSerializable(EXTRA_DATA);

        AlertDialog.Builder dialogBuilder;
        boolean darkTheme = AppSettings.get().isDarkThemeEnabled();
        dialogBuilder = new AlertDialog.Builder(getActivity(), darkTheme ?
                R.style.Theme_AppCompat_Dialog : R.style.Theme_AppCompat_Light_Dialog);

        dialogBuilder.setTitle(title);
        if (!TextUtils.isEmpty(message)) {
            dialogBuilder.setMessage(message);
        }

        dialogBuilder.setPositiveButton(getString(android.R.string.ok), (dialog, which) ->
                notifyCallbacks(true));

        dialogBuilder.setNegativeButton(getString(R.string.cancel), (dialog, which) -> {
            dialog.dismiss();
            // BUGFIX: this branch previously iterated _callbacks without any
            // null checks (unlike the OK branch) and threw a
            // NullPointerException when no callbacks were registered.
            notifyCallbacks(false);
        });

        return dialogBuilder.show();
    }

    /** Delivers the user's answer plus the payload to all non-null callbacks. */
    private void notifyCallbacks(boolean confirmed) {
        if (_callbacks == null) {
            return;
        }
        for (ConfirmDialogCallback cdc : _callbacks) {
            if (cdc != null) {
                cdc.onConfirmDialogAnswer(confirmed, _data);
            }
        }
    }

    public interface ConfirmDialogCallback {
        void onConfirmDialogAnswer(boolean confirmed, Serializable data);
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2004-present Facebook. All Rights Reserved.
#include "SamplingProfilerJniMethod.h"
#include <JavaScriptCore/JSProfilerPrivate.h>
#include <jschelpers/JSCHelpers.h>
#include <jni.h>
#include <string>
using namespace facebook::jni;
namespace facebook {
namespace react {
// Creates the C++ half of the hybrid Java/C++ object. The Java side passes
// the JSGlobalContextRef disguised as a jlong.
/* static */ jni::local_ref<SamplingProfilerJniMethod::jhybriddata>
SamplingProfilerJniMethod::initHybrid(jni::alias_ref<jclass>,
                                      jlong javaScriptContext) {
  return makeCxxInstance(javaScriptContext);
}

// Registers the JNI entry points ("initHybrid", "poke") exposed to Java.
/* static */ void SamplingProfilerJniMethod::registerNatives() {
  registerHybrid(
      {makeNativeMethod("initHybrid", SamplingProfilerJniMethod::initHybrid),
       makeNativeMethod("poke", SamplingProfilerJniMethod::poke)});
}

SamplingProfilerJniMethod::SamplingProfilerJniMethod(jlong javaScriptContext) {
  // Recover the JSC context pointer smuggled through the jlong.
  context_ = reinterpret_cast<JSGlobalContextRef>(javaScriptContext);
}
// Toggles the JSC sampling profiler. A null result from the poke means
// profiling has just started; otherwise the result is the collected profile,
// which is converted to UTF-8 and sent back through the responder.
void SamplingProfilerJniMethod::poke(
    jni::alias_ref<JSPackagerClientResponder::javaobject> responder) {
  if (!JSC_JSSamplingProfilerEnabled(context_)) {
    responder->error("The JSSamplingProfiler is disabled. See this "
                     "https://fburl.com/u4lw7xeq for some help");
    return;
  }
  JSValueRef jsResult = JSC_JSPokeSamplingProfiler(context_);
  if (JSC_JSValueGetType(context_, jsResult) == kJSTypeNull) {
    responder->respond("started");
  } else {
    JSStringRef resultStrRef = JSValueToStringCopy(context_, jsResult, nullptr);
    // BUGFIX: the buffer was previously sized from JSStringGetLength(), which
    // counts UTF-16 code units, truncating non-ASCII UTF-8 output; it was also
    // a variable-length array, which is not standard C++. Use the UTF-8-aware
    // bound and a std::string instead.
    size_t maxSize = JSStringGetMaximumUTF8CStringSize(resultStrRef);
    std::string buffer(maxSize, '\0');
    size_t written = JSStringGetUTF8CString(resultStrRef, &buffer[0], maxSize);
    JSStringRelease(resultStrRef);
    // JSStringGetUTF8CString returns the byte count including the trailing NUL.
    buffer.resize(written > 0 ? written - 1 : 0);
    responder->respond(buffer.c_str());
  }
}
}
}
| {
"pile_set_name": "Github"
} |
def extractStartlingSurprisesAtEveryStep(item):
    """Build a release message for the 'Bu Bu Jing Xin' series.

    Returns None for previews or items without any volume/chapter/fragment
    numbering, a release message for recognized tags, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)

    # Skip items with no usable numbering as well as preview posts.
    has_numbering = bool(vol or chp or frag)
    if not has_numbering or 'preview' in title.lower():
        return None

    if 'bu bu jing xin' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'Bu Bu Jing Xin',
                                       vol, chp, frag=frag, postfix=postfix)
| {
"pile_set_name": "Github"
} |
'use strict';

// Handles injected by db-migrate at load time (see setup below).
var dbm;
var type;
var seed;

/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
  dbm = options.dbmigrate;
  type = dbm.dataType;
  seed = seedLink;
};
exports.up = function (db) {
return Promise.all([
db.runSql('UPDATE office SET name = \'Office of Brazil and Southern Cone (WHA/BSC)\' where name=\'Office of Brail and Southern Cone (WHA/BSC)\''),
db.runSql('UPDATE office SET name = \'U.S. Embassy La Paz\' where name=\'U.S. Embassy LaPaz\''),
]);
};
exports.down = function (db) {
return Promise.all([
db.runSql('UPDATE office SET name = \'Office of Brail and Southern Cone (WHA/BSC)\' where name=\'Office of Brazil and Southern Cone (WHA/BSC)\''),
db.runSql('UPDATE office SET name = \'U.S. Embassy LaPaz\' where name=\'U.S. Embassy La Paz\''),
]);
};
| {
"pile_set_name": "Github"
} |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
mojom = "//url/mojo/url.mojom"
public_headers = [ "//url/gurl.h" ]
traits_headers = [ "//url/mojo/url_gurl_struct_traits.h" ]
deps = [
"//url",
]
type_mappings = [ "url.mojom.Url=GURL" ]
| {
"pile_set_name": "Github"
} |
require_relative '../../../spec_helper'
require 'cgi'
# Spec for CGI::QueryExtension#from, which exposes the HTTP From header.
describe "CGI::QueryExtension#from" do
  before :each do
    # CGI.new reads REQUEST_METHOD from the environment; set it and remember
    # the previous value so it can be restored afterwards.
    ENV['REQUEST_METHOD'], @old_request_method = "GET", ENV['REQUEST_METHOD']
    @cgi = CGI.new
  end

  after :each do
    ENV['REQUEST_METHOD'] = @old_request_method
  end

  it "returns ENV['HTTP_FROM']" do
    # Swap in a known value, asserting inside ensure-protected code so the
    # original environment is always restored.
    old_value, ENV['HTTP_FROM'] = ENV['HTTP_FROM'], "googlebot(at)googlebot.com"
    begin
      @cgi.from.should == "googlebot(at)googlebot.com"
    ensure
      ENV['HTTP_FROM'] = old_value
    end
  end
end
| {
"pile_set_name": "Github"
} |
package org.basex.query.func.validate;
import org.basex.query.*;
import org.basex.query.func.*;
import org.basex.query.value.item.*;
import org.basex.util.*;
/**
 * Function implementation: returns the name of the XSD processor used by the
 * {@code validate:xsd} functions (taken from {@code ValidateXsd.IMPL}).
 *
 * @author BaseX Team 2005-20, BSD License
 * @author Christian Gruen
 */
public final class ValidateXsdProcessor extends StandardFunc {
  @Override
  public Item item(final QueryContext qc, final InputInfo ii) {
    // IMPL holds (offset, name, version, ...) tuples; OFFSET+1 is the name.
    return Str.get(ValidateXsd.IMPL[ValidateXsd.OFFSET + 1]);
  }
}
| {
"pile_set_name": "Github"
} |
<html>
<head>
<title>Path test</title>
<style type="text/css">
.pixel {
position: absolute;
width: 1px;
height: 1px;
overflow: hidden;
background: #000;
}
.red { background: red; }
.blue { background: blue; }
</style>
<script language="JavaScript" type="text/javascript">
// Dojo configuration
djConfig = {
isDebug: true
};
</script>
<script language="JavaScript" type="text/javascript"
src="../../dojo.js"></script>
<script language="JavaScript" type="text/javascript">
dojo.require("dojo.math.*");
// Renders `curve` by appending `steps` 1x1 "pixel" divs to the page.
// `curve` is any dojo.math.curves object exposing getValue(t), t in [0,1].
// NOTE(review): called as a plain function, so `this` is the global object and
// `pixels` leaks globally; style.left/top are assigned without "px" units,
// which only renders correctly because this page has no doctype (quirks mode).
function drawCurve(curve,steps,className) {
if(!className) className = "pixel";  // default CSS class for each dot
if(!steps) steps = 100;              // default sampling resolution
this.pixels = new Array(steps)
for(var i=0;i<steps;i++) {
// Sample the curve at evenly spaced parameter values.
var pt = curve.getValue(i/steps);
this.pixels[i] = document.createElement("div");
this.pixels[i].className = className;
this.pixels[i].style.left = pt[0];
this.pixels[i].style.top = pt[1];
document.body.appendChild(this.pixels[i]);
}
}
function init(){
var c = dojo.math.curves;
var p = new c.Path();
p.add(new c.Line([10,10], [100,100]), 5);
p.add(new c.Line([0,0], [20,0]), 2);
p.add(new c.CatmullRom([[0,0], [400,400], [200,200], [500,50]]), 50);
p.add(new c.Arc([0,0], [100,100]), 20);
p.add(new c.Arc([0,0], [100,100], true), 20);
drawCurve(p, 200, "pixel");
//drawCurve(new c.Line([0,250], [800,250]), 50, "pixel red");
//drawCurve(new c.Line([500,0], [500,600]), 50, "pixel red");
//drawCurve(new c.Arc([300,300], [700,200]), 50, "pixel");
//drawCurve(new c.Arc([200,200], [100,100], false), 50, "pixel blue");
}
dojo.addOnLoad(init);
</script>
</head>
<body>
</body>
</html>
| {
"pile_set_name": "Github"
} |
id: dsq-747531936
date: 2010-04-05T22:49:24.0000000-07:00
name: DonSleza4e
avatar: https://disqus.com/api/users/avatars/DonSleza4e.jpg
message: <p>Awesome<br>Integrated lib with my <a href="http://asp.net" rel="nofollow noopener" title="asp.net">asp.net</a> mvc project ^^</p>
| {
"pile_set_name": "Github"
} |
// Contest solution: reads n inclusive ranges [l[i], r[i]] and a divisor p,
// then for every cyclically adjacent pair of ranges adds 2000 times the
// probability that at least one of two uniformly drawn values (one per range)
// is divisible by p. Prints the expected total with 9 decimals.
#include <bits/stdc++.h>
// scanf shorthands.
#define sd(x) scanf("%d",&x)
#define sd2(x,y) scanf("%d%d",&x,&y)
#define sd3(x,y,z) scanf("%d%d%d",&x,&y,&z)
#define fi first
#define se second
#define pb(x) push_back(x)
#define mp(x,y) make_pair(x,y)
#define LET(x, a) __typeof(a) x(a)
#define foreach(it, v) for(LET(it, v.begin()); it != v.end(); it++)
#define _ ios_base::sync_with_stdio(false);cin.tie(NULL);cout.tie(NULL);
#define __ freopen("input.txt","r",stdin);freopen("output.txt","w",stdout);
// NOTE(review): redefining `line` as __LINE__ breaks any later use of `line`
// as an identifier; harmless here but fragile.
#define func __FUNCTION__
#define line __LINE__
using namespace std;
// Debug stream helpers for pairs and vectors.
template<typename S, typename T>
ostream& operator<<(ostream& out, pair<S, T> const& p){out<<'('<<p.fi<<", "<<p.se<<')'; return out;}
template<typename T>
ostream& operator<<(ostream& out, vector<T> const & v){
int l = v.size(); for(int i = 0; i < l-1; i++) out<<v[i]<<' '; if(l>0) out<<v[l-1]; return out;}
// Variadic trace helper: prints all arguments separated by spaces.
void tr(){cout << endl;}
template<typename S, typename ... Strings>
void tr(S x, const Strings&... rest){cout<<x<<' ';tr(rest...);}
const int N = 100100;
int n, p;        // number of ranges; divisor
int l[N], r[N];  // inclusive range endpoints
int main(){
sd2(n,p);
for(int i = 0; i < n; i++){
sd2(l[i], r[i]);
}
// Duplicate range 0 at index n so the loop below can pair i with i-1
// for i = 1..n, covering the full cycle.
l[n] = l[0];
r[n] = r[0];
long double res = 0;
for(int i = 1; i <= n; i++){
// v1/v2: number of multiples of the divisor inside ranges i and i-1.
long long v1 = (r[i]/p) - ((l[i]-1)/p);
long long v2 = (r[i-1]/p) - ((l[i-1]-1)/p);
long long l1 = r[i]-l[i]+1;
long long l2 = r[i-1]-l[i-1]+1;
// t: ways to pick one value from each range with NEITHER divisible.
long long t = (l1-v1)*(l2-v2);
// NOTE(review): this local probability `p` shadows the global divisor `p`
// for the rest of the loop body — intentional but easy to misread.
long double p = (long double) t / (long double) (l1*l2);
p = 1.0f-p;
res += p*2000;
}
printf("%.9lf\n", (double)res);
return 0;
}
| {
"pile_set_name": "Github"
} |
// Frontend codegen regression test for alloca alignment on i386-darwin; the
// lines containing FileCheck directives below are matched against the IR.
// RUN: %clang_cc1 -emit-llvm -triple i386-apple-macosx10.7.2 < %s | FileCheck %s

// The preferred alignment for a long long on x86-32 is 8; make sure the
// alloca for x uses that alignment.
int test (long long x) {
return (int)x;
}
// CHECK-LABEL: define i32 @test
// CHECK: alloca i64, align 8

// Make sure we honor the aligned attribute.
struct X { int x,y,z,a; };
int test2(struct X x __attribute((aligned(16)))) {
return x.z;
}
// CHECK-LABEL: define i32 @test2
// CHECK: alloca %struct._Z1X, align 16
| {
"pile_set_name": "Github"
} |
// Build-time environment flags for production builds.
// NOTE(review): presumably substituted for the dev environment file by the
// Angular CLI `fileReplacements` mechanism — confirm against angular.json.
export const environment = {
  production: true
};
| {
"pile_set_name": "Github"
} |
import { Component, Inject, Input } from '@angular/core';
import { MediaObserver } from '@angular/flex-layout';
import { Observable } from 'rxjs';
import { map, startWith } from 'rxjs/operators';
import { API_BASE_URL } from '../../app.tokens';
import { Product } from '../../shared/services';
/**
 * Grid of suggested products. The number of grid columns tracks the active
 * responsive breakpoint (xs..xl) via `breakpointsToColumnsNumber`.
 */
@Component({
  selector: 'nga-product-suggestion',
  styleUrls: [ './product-suggestion.component.scss' ],
  templateUrl: './product-suggestion.component.html'
})
export class ProductSuggestionComponent {
  // Products to render; supplied by the parent component.
  @Input() products: Product[];

  // Emits the current column count whenever the media breakpoint changes.
  readonly columns$: Observable<number>;

  // Breakpoint alias -> number of grid columns.
  readonly breakpointsToColumnsNumber = new Map([
    [ 'xs', 2 ],
    [ 'sm', 3 ],
    [ 'md', 5 ],
    [ 'lg', 2 ],
    [ 'xl', 3 ],
  ]);

  constructor(
    @Inject(API_BASE_URL) private readonly baseUrl: string,
    private readonly media: MediaObserver
  ) {
    // If the initial screen size is xs ObservableMedia doesn't emit an event,
    // hence the startWith(3) fallback below.
    // In the older versions of flex-layout we used ObservableMedia, which is deprecated.
    // Use MediaObserver instead
    this.columns$ = this.media.media$
      .pipe(
        map(mc => <number>this.breakpointsToColumnsNumber.get(mc.mqAlias)),
        startWith(3)
      );
  }

  // Absolute URL for a product image, rooted at the injected API base URL.
  urlFor(product: Product): string {
    return `${this.baseUrl}/${product.imageUrl}`;
  }
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Crash-report screen: a restart button pinned to the top, with the full
     crash details displayed below it. -->
<RelativeLayout android:layout_width="match_parent"
    android:layout_height="match_parent"
    xmlns:android="http://schemas.android.com/apk/res/android" >

    <!-- Restarts the app after a crash ("重启App" = "Restart App"). -->
    <Button
        android:id="@+id/btn_crash_restart"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="重启App"
        android:layout_alignParentTop="true"
        />

    <!-- Multi-line, non-ellipsized crash stack trace / details. -->
    <TextView
        android:id="@+id/tv_crash_info"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:singleLine="false"
        android:ellipsize="none"
        android:gravity="left"
        android:inputType="textMultiLine"
        android:layout_below="@id/btn_crash_restart"
        />
</RelativeLayout>
"pile_set_name": "Github"
} |
SET UTF-8
LANG tr
| {
"pile_set_name": "Github"
} |
f := function()
local l;
l := 0 * [1..6];
l[[1..3]] := 1;
end;
f();
Where();
WhereWithVars();
quit;
f:=function() if true = 1/0 then return 1; fi; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() local x; if x then return 1; fi; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() if 1 then return 1; fi; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() if 1 < 0 then return 1; elif 1 then return 2; fi; return 3; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() while 1 do return 1; od; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() local i; for i in 1 do return 1; od; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() local i; for i in true do return 1; od; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function(x) local i,j; for i in true do return 1; od; return 2; end;;
f([1,2,3]);
Where();
WhereWithVars();
quit;
f:=function(x) local i,j; Unbind(x); for i in true do return 1; od; return 2; end;;
f([1,2,3]);
Where();
WhereWithVars();
quit;
f:=function(x) local i,j; Unbind(x); j := 4; for i in true do return 1; od; return 2; end;;
f([1,2,3]);
Where();
WhereWithVars();
quit;
f:=function() local x; repeat x:=1; until 1; return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() local x; Assert(0, 1); return 2; end;;
f();
Where();
WhereWithVars();
quit;
f:=function() local x; Assert(0, 1, "hello"); return 2; end;;
f();
Where();
WhereWithVars();
quit;
# Verify issue #2656 is fixed
InstallMethod( \[\,\], [ IsMatrixObj, IsPosInt, IsPosInt ],
{ m, row, col } -> ELM_LIST( m, row, col ) );
l := [[1]];; f := {} -> l[2,1];;
f();
Where();
WhereWithVars();
quit;
# verify issue #1373 is fixed
InstallMethod( Matrix, [IsFilter, IsSemiring, IsMatrixObj], {a,b,c} -> fail );
| {
"pile_set_name": "Github"
} |
package org.jetbrains.dokka.base.transformers.documentables
import org.jetbrains.dokka.model.*
import org.jetbrains.dokka.plugability.DokkaContext
import org.jetbrains.dokka.transformers.documentation.PreMergeDocumentableTransformer
import org.jetbrains.dokka.transformers.documentation.perPackageOptions
import org.jetbrains.dokka.transformers.documentation.source
import org.jetbrains.dokka.transformers.documentation.sourceSet
import java.io.File
/**
 * Pre-merge transformer that removes documentables the user asked to suppress:
 * whole packages (via `perPackageOptions.suppress`) and any documentable whose
 * source file lives under one of the source set's `suppressedFiles` paths.
 */
class SuppressedDocumentableFilterTransformer(val context: DokkaContext) : PreMergeDocumentableTransformer {
    override fun invoke(modules: List<DModule>): List<DModule> {
        return modules.mapNotNull(::filterModule)
    }

    /** Filters a module's packages; returns null when nothing survives. */
    private fun filterModule(module: DModule): DModule? {
        val packages = module.packages.mapNotNull { pkg -> filterPackage(pkg) }
        return when {
            packages == module.packages -> module // nothing removed: keep the original instance
            packages.isEmpty() -> null
            else -> module.copy(packages = packages)
        }
    }

    /** Filters one package; returns null when suppressed or left empty. */
    private fun filterPackage(pkg: DPackage): DPackage? {
        val options = perPackageOptions(pkg)
        if (options?.suppress == true) {
            return null
        }
        val filteredChildren = pkg.children.filterNot(::isSuppressed)
        return when {
            filteredChildren == pkg.children -> pkg
            filteredChildren.isEmpty() -> null
            else -> pkg.copy(
                // children is derived; the copy must set each typed collection.
                functions = filteredChildren.filterIsInstance<DFunction>(),
                classlikes = filteredChildren.filterIsInstance<DClasslike>(),
                typealiases = filteredChildren.filterIsInstance<DTypeAlias>(),
                properties = filteredChildren.filterIsInstance<DProperty>()
            )
        }
    }

    /** True when the documentable's source file is inside a suppressed path. */
    private fun isSuppressed(documentable: Documentable): Boolean {
        if (documentable !is WithSources) return false
        val sourceFile = File(source(documentable).path).absoluteFile
        return sourceSet(documentable).suppressedFiles.any { suppressedFile ->
            sourceFile.startsWith(suppressedFile.absoluteFile)
        }
    }
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
</Workspace>
| {
"pile_set_name": "Github"
} |
/*
* linux/include/asm-arm/proc-armv/processor.h
*
* Copyright (C) 1996-1999 Russell King.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* Changelog:
* 20-09-1996 RMK Created
* 26-09-1996 RMK Added 'EXTRA_THREAD_STRUCT*'
* 28-09-1996 RMK Moved start_thread into the processor dependencies
* 09-09-1998 PJB Delete redundant `wp_works_ok'
* 30-05-1999 PJB Save sl across context switches
* 31-07-1999 RMK Added 'domain' stuff
*/
#ifndef __ASM_PROC_PROCESSOR_H
#define __ASM_PROC_PROCESSOR_H

#include <asm/proc/domain.h>

/* One page of kernel stack per task. */
#define KERNEL_STACK_SIZE PAGE_SIZE

/*
 * Callee-saved register state preserved across a kernel context switch.
 */
struct context_save_struct {
unsigned long cpsr;
unsigned long r4;
unsigned long r5;
unsigned long r6;
unsigned long r7;
unsigned long r8;
unsigned long r9;
unsigned long sl;
unsigned long fp;
unsigned long pc;
};

/* Initial saved context: SVC mode, all other registers zeroed. */
#define INIT_CSS (struct context_save_struct){ SVC_MODE, 0, 0, 0, 0, 0, 0, 0, 0, 0 }

/* Extra per-thread state: the ARM domain access control value. */
#define EXTRA_THREAD_STRUCT \
unsigned int domain;

#define EXTRA_THREAD_STRUCT_INIT \
domain: domain_val(DOMAIN_USER, DOMAIN_CLIENT) | \
domain_val(DOMAIN_KERNEL, DOMAIN_MANAGER) | \
domain_val(DOMAIN_IO, DOMAIN_CLIENT)

/*
 * Set up user-space registers to begin executing at `pc` with stack `sp`;
 * argc/argv/envp are read from the top of the new user stack.
 */
#define start_thread(regs,pc,sp) \
({ \
unsigned long *stack = (unsigned long *)sp; \
set_fs(USER_DS); \
memzero(regs->uregs, sizeof(regs->uregs)); \
if (current->personality & ADDR_LIMIT_32BIT) \
regs->ARM_cpsr = USR_MODE; \
else \
regs->ARM_cpsr = USR26_MODE; \
regs->ARM_pc = pc; /* pc */ \
regs->ARM_sp = sp; /* sp */ \
regs->ARM_r2 = stack[2]; /* r2 (envp) */ \
regs->ARM_r1 = stack[1]; /* r1 (argv) */ \
regs->ARM_r0 = stack[0]; /* r0 (argc) */ \
})

/*
 * NOTE(review): these index fixed slots near the top of the task's stack
 * page — presumably the saved user pc/sp; confirm against the switch code.
 */
#define KSTK_EIP(tsk) (((unsigned long *)(4096+(unsigned long)(tsk)))[1019])
#define KSTK_ESP(tsk) (((unsigned long *)(4096+(unsigned long)(tsk)))[1017])

/* Allocation and freeing of basic task resources. */
/*
 * NOTE! The task struct and the stack go together
 */
#define ll_alloc_task_struct() ((struct task_struct *) __get_free_pages(GFP_KERNEL,1))
#define ll_free_task_struct(p) free_pages((unsigned long)(p),1)

#endif
| {
"pile_set_name": "Github"
} |
if (global.GENTLY) require = GENTLY.hijack(require);
var crypto = require('crypto');
var fs = require('fs');
var util = require('util'),
path = require('path'),
File = require('./file'),
MultipartParser = require('./multipart_parser').MultipartParser,
QuerystringParser = require('./querystring_parser').QuerystringParser,
OctetParser = require('./octet_parser').OctetParser,
JSONParser = require('./json_parser').JSONParser,
StringDecoder = require('string_decoder').StringDecoder,
EventEmitter = require('events').EventEmitter,
Stream = require('stream').Stream,
os = require('os');
/**
 * Represents one in-progress form upload and emits 'field'/'file'/'progress'/
 * 'error'/'end' events while parsing. Recognized options: maxFields,
 * maxFieldsSize, keepExtensions, uploadDir, encoding, hash, multiples.
 * May be called without `new`.
 */
function IncomingForm(opts) {
  if (!(this instanceof IncomingForm)) return new IncomingForm(opts);
  EventEmitter.call(this);

  opts = opts || {};

  this.error = null;
  this.ended = false;

  this.maxFields = opts.maxFields || 1000;
  this.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024;
  this.keepExtensions = opts.keepExtensions || false;
  // BUGFIX: os.tmpDir() was removed in modern Node.js; prefer os.tmpdir()
  // and fall back to the legacy name so very old runtimes keep working.
  this.uploadDir = opts.uploadDir || (os.tmpdir || os.tmpDir)();
  this.encoding = opts.encoding || 'utf-8';
  this.headers = null;
  this.type = null;
  this.hash = opts.hash || false;
  this.multiples = opts.multiples || false;

  this.bytesReceived = null;
  this.bytesExpected = null;

  this._parser = null;
  this._flushing = 0;
  this._fieldsSize = 0;
  this.openedFiles = [];

  return this;
}
util.inherits(IncomingForm, EventEmitter);
exports.IncomingForm = IncomingForm;
/**
 * Parses an incoming HTTP request. If `cb` is given, all fields and files are
 * collected and delivered as cb(err, fields, files) when parsing finishes;
 * otherwise the caller listens to 'field'/'file'/'error'/'end'.
 * Returns `this` for chaining.
 */
IncomingForm.prototype.parse = function(req, cb) {
  // Replace the no-op pause/resume stubs with versions bound to this request.
  this.pause = function() {
    try {
      req.pause();
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err);
      }
      return false;
    }
    return true;
  };

  this.resume = function() {
    try {
      req.resume();
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err);
      }
      return false;
    }
    return true;
  };

  // Setup callback first, so we don't miss anything from data events emitted
  // immediately.
  if (cb) {
    var fields = {}, files = {};
    this
      .on('field', function(name, value) {
        fields[name] = value;
      })
      .on('file', function(name, file) {
        // With `multiples`, repeated field names collect into an array.
        if (this.multiples) {
          if (files[name]) {
            if (!Array.isArray(files[name])) {
              files[name] = [files[name]];
            }
            files[name].push(file);
          } else {
            files[name] = file;
          }
        } else {
          files[name] = file;
        }
      })
      .on('error', function(err) {
        cb(err, fields, files);
      })
      .on('end', function() {
        cb(null, fields, files);
      });
  }

  // Parse headers and setup the parser, ready to start listening for data.
  this.writeHeaders(req.headers);

  // Start listening for data.
  var self = this;
  req
    .on('error', function(err) {
      self._error(err);
    })
    .on('aborted', function() {
      self.emit('aborted');
      self._error(new Error('Request aborted'));
    })
    .on('data', function(buffer) {
      self.write(buffer);
    })
    .on('end', function() {
      if (self.error) {
        return;
      }

      var err = self._parser.end();
      if (err) {
        self._error(err);
      }
    });

  return this;
};

// Stores the request headers, then derives byte counts and the parser choice.
IncomingForm.prototype.writeHeaders = function(headers) {
  this.headers = headers;
  this._parseContentLength();
  this._parseContentType();
};

// Feeds one chunk of the request body to the active parser.
// Returns the number of bytes consumed (undefined on error states).
IncomingForm.prototype.write = function(buffer) {
  if (this.error) {
    return;
  }
  if (!this._parser) {
    this._error(new Error('uninitialized parser'));
    return;
  }

  this.bytesReceived += buffer.length;
  this.emit('progress', this.bytesReceived, this.bytesExpected);

  var bytesParsed = this._parser.write(buffer);
  if (bytesParsed !== buffer.length) {
    this._error(new Error('parser error, '+bytesParsed+' of '+buffer.length+' bytes parsed'));
  }

  return bytesParsed;
};

IncomingForm.prototype.pause = function() {
  // this does nothing, unless overwritten in IncomingForm.parse
  return false;
};

IncomingForm.prototype.resume = function() {
  // this does nothing, unless overwritten in IncomingForm.parse
  return false;
};

// Default part dispatcher; users may override onPart to intercept parts.
IncomingForm.prototype.onPart = function(part) {
  // this method can be overwritten by the user
  this.handlePart(part);
};
/**
 * Default part handler. Parts without a filename are plain fields (buffered
 * and emitted as 'field'); parts with a filename are streamed to a temp file
 * and emitted as 'file' once fully written.
 */
IncomingForm.prototype.handlePart = function(part) {
  var self = this;

  if (part.filename === undefined) {
    // Plain field: accumulate decoded text, enforcing maxFieldsSize.
    var value = ''
      , decoder = new StringDecoder(this.encoding);

    part.on('data', function(buffer) {
      self._fieldsSize += buffer.length;
      if (self._fieldsSize > self.maxFieldsSize) {
        self._error(new Error('maxFieldsSize exceeded, received '+self._fieldsSize+' bytes of field data'));
        return;
      }
      value += decoder.write(buffer);
    });

    part.on('end', function() {
      self.emit('field', part.name, value);
    });
    return;
  }

  // File upload: count it as in-flight until it is fully flushed to disk.
  this._flushing++;

  var file = new File({
    path: this._uploadPath(part.filename),
    name: part.filename,
    type: part.mime,
    hash: self.hash
  });

  this.emit('fileBegin', part.name, file);

  file.open();
  this.openedFiles.push(file);

  part.on('data', function(buffer) {
    if (buffer.length == 0) {
      return;
    }
    // Backpressure: pause the request until this chunk is written out.
    self.pause();
    file.write(buffer, function() {
      self.resume();
    });
  });

  part.on('end', function() {
    file.end(function() {
      self._flushing--;
      self.emit('file', part.name, file);
      self._maybeEnd();
    });
  });
};
// Parser stand-in used for zero-length request bodies: there is nothing to
// parse, so end() just marks the form as ended and triggers completion.
function dummyParser(form) {
  var parser = {
    end: function () {
      form.ended = true;
      form._maybeEnd();
      return null;
    }
  };
  return parser;
}
/**
 * Chooses a body parser from the Content-Type header. Order matters:
 * octet-stream, urlencoded, multipart (requires a boundary), then JSON;
 * anything else is an error. A zero-length body gets a no-op parser.
 */
IncomingForm.prototype._parseContentType = function() {
  if (this.bytesExpected === 0) {
    this._parser = dummyParser(this);
    return;
  }

  if (!this.headers['content-type']) {
    this._error(new Error('bad content-type header, no content-type'));
    return;
  }

  if (this.headers['content-type'].match(/octet-stream/i)) {
    this._initOctetStream();
    return;
  }

  if (this.headers['content-type'].match(/urlencoded/i)) {
    this._initUrlencoded();
    return;
  }

  if (this.headers['content-type'].match(/multipart/i)) {
    // The boundary parameter may be quoted or bare.
    var m = this.headers['content-type'].match(/boundary=(?:"([^"]+)"|([^;]+))/i);
    if (m) {
      this._initMultipart(m[1] || m[2]);
    } else {
      this._error(new Error('bad content-type header, no multipart boundary'));
    }
    return;
  }

  if (this.headers['content-type'].match(/json/i)) {
    this._initJSONencoded();
    return;
  }

  this._error(new Error('bad content-type header, unknown content-type: '+this.headers['content-type']));
};
// Records the first error, emits it, and best-effort deletes any partially
// written upload files. Errors after the first (or after 'end') are ignored.
IncomingForm.prototype._error = function(err) {
  if (this.error || this.ended) {
    return;
  }

  this.error = err;
  this.emit('error', err);

  var files = this.openedFiles;
  if (Array.isArray(files)) {
    for (var i = 0; i < files.length; i++) {
      var file = files[i];
      file._writeStream.destroy();
      // Unlink asynchronously; failures are deliberately ignored.
      setTimeout(fs.unlink, 0, file.path, function(error) { });
    }
  }
};
// Derives bytesExpected from the request headers and emits an initial
// 'progress' event. With neither Content-Length nor Transfer-Encoding
// present, no body is expected; with chunked encoding the total is unknown
// and bytesExpected stays null.
IncomingForm.prototype._parseContentLength = function() {
  this.bytesReceived = 0;

  var contentLength = this.headers['content-length'];
  if (contentLength) {
    this.bytesExpected = parseInt(contentLength, 10);
  } else if (this.headers['transfer-encoding'] === undefined) {
    this.bytesExpected = 0;
  }

  if (this.bytesExpected !== null) {
    this.emit('progress', this.bytesReceived, this.bytesExpected);
  }
};
// Factory for the low-level multipart parser (separate for overridability).
IncomingForm.prototype._newParser = function() {
  return new MultipartParser();
};

/**
 * Sets up streaming parsing of a multipart/form-data body with the given
 * boundary. Each part is surfaced as a readable Stream via this.onPart();
 * headers accumulate per part, and bodies are decoded according to their
 * Content-Transfer-Encoding (binary/7bit/8bit pass through; base64 is
 * decoded incrementally in 4-character units).
 */
IncomingForm.prototype._initMultipart = function(boundary) {
  this.type = 'multipart';

  var parser = new MultipartParser(),
      self = this,
      headerField,
      headerValue,
      part;

  parser.initWithBoundary(boundary);

  // Start a fresh part: new stream, empty header accumulators.
  parser.onPartBegin = function() {
    part = new Stream();
    part.readable = true;
    part.headers = {};
    part.name = null;
    part.filename = null;
    part.mime = null;

    part.transferEncoding = 'binary';
    part.transferBuffer = '';

    headerField = '';
    headerValue = '';
  };

  // Header names/values may arrive split across chunks; append as they come.
  parser.onHeaderField = function(b, start, end) {
    headerField += b.toString(self.encoding, start, end);
  };

  parser.onHeaderValue = function(b, start, end) {
    headerValue += b.toString(self.encoding, start, end);
  };

  // One complete header line: extract name/filename/mime/transfer-encoding.
  parser.onHeaderEnd = function() {
    headerField = headerField.toLowerCase();
    part.headers[headerField] = headerValue;

    var m = headerValue.match(/\bname="([^"]+)"/i);
    if (headerField == 'content-disposition') {
      if (m) {
        part.name = m[1];
      }

      part.filename = self._fileName(headerValue);
    } else if (headerField == 'content-type') {
      part.mime = headerValue;
    } else if (headerField == 'content-transfer-encoding') {
      part.transferEncoding = headerValue.toLowerCase();
    }

    headerField = '';
    headerValue = '';
  };

  // All headers seen: wire up body decoding, then hand the part stream out.
  parser.onHeadersEnd = function() {
    switch(part.transferEncoding){
      case 'binary':
      case '7bit':
      case '8bit':
        parser.onPartData = function(b, start, end) {
          part.emit('data', b.slice(start, end));
        };

        parser.onPartEnd = function() {
          part.emit('end');
        };
        break;

      case 'base64':
        parser.onPartData = function(b, start, end) {
          part.transferBuffer += b.slice(start, end).toString('ascii');

          /*
          four bytes (chars) in base64 converts to three bytes in binary
          encoding. So we should always work with a number of bytes that
          can be divided by 4, it will result in a number of bytes that
          can be divided by 3.
          */
          var offset = parseInt(part.transferBuffer.length / 4, 10) * 4;
          part.emit('data', new Buffer(part.transferBuffer.substring(0, offset), 'base64'));
          part.transferBuffer = part.transferBuffer.substring(offset);
        };

        parser.onPartEnd = function() {
          // Flush whatever remains (may be a partial base64 group).
          part.emit('data', new Buffer(part.transferBuffer, 'base64'));
          part.emit('end');
        };
        break;

      default:
        return self._error(new Error('unknown transfer-encoding'));
    }

    self.onPart(part);
  };

  parser.onEnd = function() {
    self.ended = true;
    self._maybeEnd();
  };

  this._parser = parser;
};
// Extracts the upload's file name from a Content-Disposition header value.
// Strips any Windows-style path prefix, then undoes %22 and 4-digit numeric
// entity escaping. Returns undefined when no filename parameter is present.
IncomingForm.prototype._fileName = function(headerValue) {
  var match = headerValue.match(/\bfilename="(.*?)"($|; )/i);
  if (!match) return;

  var raw = match[1];
  return raw
    .substr(raw.lastIndexOf('\\') + 1)
    .replace(/%22/g, '"')
    .replace(/&#([\d]{4});/g, function (m, code) {
      return String.fromCharCode(code);
    });
};
// Sets up parsing for application/x-www-form-urlencoded bodies: every parsed
// key/value pair is re-emitted as a 'field' event.
IncomingForm.prototype._initUrlencoded = function() {
  this.type = 'urlencoded';

  var form = this;
  var querystringParser = new QuerystringParser(this.maxFields);

  querystringParser.onField = function(key, val) {
    form.emit('field', key, val);
  };
  querystringParser.onEnd = function() {
    form.ended = true;
    form._maybeEnd();
  };

  this._parser = querystringParser;
};
/**
 * Sets up parsing for a raw application/octet-stream upload: the whole body
 * is one file, named by the x-file-name request header.
 */
IncomingForm.prototype._initOctetStream = function() {
  this.type = 'octet-stream';
  var filename = this.headers['x-file-name'];
  var mime = this.headers['content-type'];

  var file = new File({
    path: this._uploadPath(filename),
    name: filename,
    type: mime
  });

  this.emit('fileBegin', filename, file);
  file.open();
  this._flushing++;

  var self = this;

  self._parser = new OctetParser();

  //Keep track of writes that haven't finished so we don't emit the file before it's done being written
  var outstandingWrites = 0;

  self._parser.on('data', function(buffer){
    // Backpressure: pause the request while the chunk is flushed to disk.
    self.pause();
    outstandingWrites++;

    file.write(buffer, function() {
      outstandingWrites--;
      self.resume();

      if(self.ended){
        self._parser.emit('doneWritingFile');
      }
    });
  });

  self._parser.on('end', function(){
    self._flushing--;
    self.ended = true;

    // Only finalize once every queued write has hit disk.
    var done = function(){
      file.end(function() {
        self.emit('file', 'file', file);
        self._maybeEnd();
      });
    };

    if(outstandingWrites === 0){
      done();
    } else {
      self._parser.once('doneWritingFile', done);
    }
  });
};
// Sets up parsing for application/json bodies: each top-level key/value the
// JSON parser produces is re-emitted as a 'field' event.
IncomingForm.prototype._initJSONencoded = function() {
  this.type = 'json';

  var form = this;
  var jsonParser = new JSONParser();

  // Let the parser pre-allocate when the request advertises its size.
  if (this.bytesExpected) {
    jsonParser.initWithLength(this.bytesExpected);
  }

  jsonParser.onField = function(key, val) {
    form.emit('field', key, val);
  };
  jsonParser.onEnd = function() {
    form.ended = true;
    form._maybeEnd();
  };

  this._parser = jsonParser;
};
// Builds a collision-resistant temp-file path inside uploadDir:
// "upload_" + 32 random hex chars, optionally keeping the (sanitized)
// original file extension when keepExtensions is enabled.
IncomingForm.prototype._uploadPath = function(filename) {
  var randomBytes = crypto.randomBytes(16);
  var hex = [];
  for (var i = 0; i < randomBytes.length; i++) {
    hex.push(('0' + randomBytes[i].toString(16)).slice(-2));
  }
  var name = 'upload_' + hex.join('');

  if (this.keepExtensions) {
    var ext = path.extname(filename);
    // Drop anything after the first alphanumeric extension (e.g. ".tar.gz;x").
    ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1');
    name += ext;
  }

  return path.join(this.uploadDir, name);
};
// Emits 'end' once the body has fully arrived, all file writes have flushed,
// and no error occurred. Safe to call speculatively after any completion step.
IncomingForm.prototype._maybeEnd = function() {
  var finished = this.ended && !this._flushing && !this.error;
  if (finished) {
    this.emit('end');
  }
};
| {
"pile_set_name": "Github"
} |
<!-- ============ PROGRESS -->
<!-- ====================== -->
<h1>Progress</h1>
<!-- ============ VARIABLES -->
<!-- ====================== -->
<p>
<h4>Global variables</h4>
<div><pre hljs class="prettyprint lang-sass">$progress-class: "-progress" !global
$progress-bar-class: "-bar" !global
$progress-bar-padding-vertical: $base-padding-vertical / 3
$progress-bar-padding-horizontal: $base-padding-horizontal / 1.5
$progress-font-weight: 600 !global
$progress-border-radius: 4px !global
$progress-border-width: 0px !global
$progress-border-style: solid !global
$progress-padding: 3px !global
$progress-background: #fff !global</pre></div>
</p>
<p>
Use widget class <code>-progress</code>. Apply themes and sizes. Append <code>-bar</code> inside <code>-progress</code>.
</p>
<div class="-row example-block">
<div class="-col12 view">
<div class="-progress -primary-">
<div class="-bar" style="width: 12%">12 %</div><div class="-bar -warning-" style="width: 25%">25 %</div><div class="-bar -error-" style="width: 5%">Something goes wrong</div>
</div>
<br>
<div class="-progress _divine -primary-">
<div class="-bar" style="width: 12%">12 %</div>
</div>
<br>
<div class="-progress -primary- -shadow-curve-">
<div class="-bar" style="width: 42%">progress with shadow 42 %</div><div class="-bar -warning-" style="width: 25%">25 %</div>
</div>
<br>
<div class="-progress -primary- -shadow-lifted-">
<div class="-bar" style="width: 42%">progress with shadow 42 %</div>
</div>
</div>
<div class="-col12 example"><pre hljs class="prettyprint lang-html"><div class="-progress -primary-">
<div class="-bar" style="width: 12%">12 %</div>
<div class="-bar -warning-" style="width: 25%">25 %</div>
<div class="-bar -error-" style="width: 5%">Something goes wrong</div>
</div>
<div class="-progress _divine -primary-">
<div class="-bar" style="width: 12%">12 %</div>
</div>
</pre></div>
</div>
| {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2012 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_REF_H
#define EIGEN_REF_H
namespace Eigen {
template<typename Derived> class RefBase;
template<typename PlainObjectType, int Options = 0,
typename StrideType = typename internal::conditional<PlainObjectType::IsVectorAtCompileTime,InnerStride<1>,OuterStride<> >::type > class Ref;
/** \class Ref
* \ingroup Core_Module
*
* \brief A matrix or vector expression mapping an existing expressions
*
* \tparam PlainObjectType the equivalent matrix type of the mapped data
* \tparam Options specifies whether the pointer is \c #Aligned, or \c #Unaligned.
* The default is \c #Unaligned.
* \tparam StrideType optionally specifies strides. By default, Ref implies a contiguous storage along the inner dimension (inner stride==1),
* but accept a variable outer stride (leading dimension).
* This can be overridden by specifying strides.
* The type passed here must be a specialization of the Stride template, see examples below.
*
* This class permits to write non template functions taking Eigen's object as parameters while limiting the number of copies.
* A Ref<> object can represent either a const expression or a l-value:
* \code
* // in-out argument:
* void foo1(Ref<VectorXf> x);
*
* // read-only const argument:
* void foo2(const Ref<const VectorXf>& x);
* \endcode
*
 * In the in-out case, the input argument must satisfy the constraints of the actual Ref<> type; otherwise a compilation error will be triggered.
* By default, a Ref<VectorXf> can reference any dense vector expression of float having a contiguous memory layout.
* Likewise, a Ref<MatrixXf> can reference any column major dense matrix expression of float whose column's elements are contiguously stored with
 * the possibility to have a constant space in between each column, i.e.: the inner stride must be equal to 1, but the outer stride (or leading dimension)
* can be greater than the number of rows.
*
* In the const case, if the input expression does not match the above requirement, then it is evaluated into a temporary before being passed to the function.
* Here are some examples:
* \code
* MatrixXf A;
* VectorXf a;
* foo1(a.head()); // OK
* foo1(A.col()); // OK
* foo1(A.row()); // compilation error because here innerstride!=1
* foo2(A.row()); // The row is copied into a contiguous temporary
* foo2(2*a); // The expression is evaluated into a temporary
* foo2(A.col().segment(2,4)); // No temporary
* \endcode
*
 * The range of inputs that can be referenced without a temporary can be enlarged using the last two template parameters.
* Here is an example accepting an innerstride!=1:
* \code
* // in-out argument:
* void foo3(Ref<VectorXf,0,InnerStride<> > x);
* foo3(A.row()); // OK
* \endcode
 * The downside here is that the function foo3 might be significantly slower than foo1 because it won't be able to exploit vectorization, and will involve more
 * expensive address computations even if the input is contiguously stored in memory. To overcome this issue, one might provide overloads internally calling a
 * template function, e.g.:
* \code
* // in the .h:
* void foo(const Ref<MatrixXf>& A);
* void foo(const Ref<MatrixXf,0,Stride<> >& A);
*
* // in the .cpp:
* template<typename TypeOfA> void foo_impl(const TypeOfA& A) {
* ... // crazy code goes here
* }
* void foo(const Ref<MatrixXf>& A) { foo_impl(A); }
* void foo(const Ref<MatrixXf,0,Stride<> >& A) { foo_impl(A); }
* \endcode
*
*
* \sa PlainObjectBase::Map(), \ref TopicStorageOrders
*/
namespace internal {

// Traits for Ref<> reuse the traits of the equivalent Map<> type, adding
// NestByRefBit so that Ref objects are nested by reference inside larger
// expressions instead of being copied.
template<typename _PlainObjectType, int _Options, typename _StrideType>
struct traits<Ref<_PlainObjectType, _Options, _StrideType> >
  : public traits<Map<_PlainObjectType, _Options, _StrideType> >
{
  typedef _PlainObjectType PlainObjectType;
  typedef _StrideType StrideType;
  enum {
    Options = _Options,
    Flags = traits<Map<_PlainObjectType, _Options, _StrideType> >::Flags | NestByRefBit
  };

  // Compile-time predicate: can an expression of type Derived be referenced
  // by this Ref directly (i.e. without evaluating it into a temporary)?
  template<typename Derived> struct match {
    enum {
      // Needs addressable storage to map.
      HasDirectAccess = internal::has_direct_access<Derived>::ret,
      // Vectors have no meaningful storage order; otherwise row/column
      // majorness must agree.
      StorageOrderMatch = PlainObjectType::IsVectorAtCompileTime || ((PlainObjectType::Flags&RowMajorBit)==(Derived::Flags&RowMajorBit)),
      // A compile-time inner stride of 0 means "contiguous" and therefore
      // only matches an expression whose inner stride is exactly 1.
      InnerStrideMatch = int(StrideType::InnerStrideAtCompileTime)==int(Dynamic)
                      || int(StrideType::InnerStrideAtCompileTime)==int(Derived::InnerStrideAtCompileTime)
                      || (int(StrideType::InnerStrideAtCompileTime)==0 && int(Derived::InnerStrideAtCompileTime)==1),
      OuterStrideMatch = Derived::IsVectorAtCompileTime
                      || int(StrideType::OuterStrideAtCompileTime)==int(Dynamic) || int(StrideType::OuterStrideAtCompileTime)==int(Derived::OuterStrideAtCompileTime),
      // Alignment only constrains the match when the Ref requests it and the
      // plain-object type is alignable.
      AlignmentMatch = (_Options!=Aligned) || ((PlainObjectType::Flags&AlignedBit)==0) || ((traits<Derived>::Flags&AlignedBit)==AlignedBit),
      MatchAtCompileTime = HasDirectAccess && StorageOrderMatch && InnerStrideMatch && OuterStrideMatch && AlignmentMatch
    };
    // true_type / false_type used for tag dispatch in Ref<const>'s ctor.
    typedef typename internal::conditional<MatchAtCompileTime,internal::true_type,internal::false_type>::type type;
  };
};

template<typename Derived>
struct traits<RefBase<Derived> > : public traits<Derived> {};

}
// Common base of Ref and Ref<const>: a MapBase expression plus the runtime
// stride object needed to address the referenced data.
template<typename Derived> class RefBase
  : public MapBase<Derived>
{
  typedef typename internal::traits<Derived>::PlainObjectType PlainObjectType;
  typedef typename internal::traits<Derived>::StrideType StrideType;
public:
  typedef MapBase<Derived> Base;
  EIGEN_DENSE_PUBLIC_INTERFACE(RefBase)

  // Inner stride of the referenced data; a compile-time stride of 0 means
  // "contiguous", i.e. an effective inner stride of 1.
  inline Index innerStride() const
  {
    return StrideType::InnerStrideAtCompileTime != 0 ? m_stride.inner() : 1;
  }

  // Outer stride: taken from m_stride when non-zero at compile time,
  // otherwise derived from the mapped shape (size for vectors; cols or rows
  // depending on storage order).
  inline Index outerStride() const
  {
    return StrideType::OuterStrideAtCompileTime != 0 ? m_stride.outer()
         : IsVectorAtCompileTime ? this->size()
         : int(Flags)&RowMajorBit ? this->cols()
         : this->rows();
  }

  // Default-construct in an "unbound" state; construct() rebinds later.
  RefBase()
    : Base(0,RowsAtCompileTime==Dynamic?0:RowsAtCompileTime,ColsAtCompileTime==Dynamic?0:ColsAtCompileTime),
      // Stride<> does not allow a default ctor for Dynamic strides, so let's initialize it with dummy values:
      m_stride(StrideType::OuterStrideAtCompileTime==Dynamic?0:StrideType::OuterStrideAtCompileTime,
               StrideType::InnerStrideAtCompileTime==Dynamic?0:StrideType::InnerStrideAtCompileTime)
  {}

  EIGEN_INHERIT_ASSIGNMENT_OPERATORS(RefBase)

protected:
  typedef Stride<StrideType::OuterStrideAtCompileTime,StrideType::InnerStrideAtCompileTime> StrideBase;

  // Rebind this Ref onto expr's storage by re-running the MapBase and stride
  // constructors in place (placement new over already-constructed members).
  template<typename Expression>
  void construct(Expression& expr)
  {
    // When the target plain-object type is a compile-time row (or column)
    // vector, accept either orientation of a 1D expression and normalize the
    // mapped dimensions accordingly.
    if(PlainObjectType::RowsAtCompileTime==1)
    {
      eigen_assert(expr.rows()==1 || expr.cols()==1);
      ::new (static_cast<Base*>(this)) Base(expr.data(), 1, expr.size());
    }
    else if(PlainObjectType::ColsAtCompileTime==1)
    {
      eigen_assert(expr.rows()==1 || expr.cols()==1);
      ::new (static_cast<Base*>(this)) Base(expr.data(), expr.size(), 1);
    }
    else
      ::new (static_cast<Base*>(this)) Base(expr.data(), expr.rows(), expr.cols());
    // Compile-time-zero strides mean "contiguous"; store 0 rather than the
    // expression's runtime stride in that case.
    ::new (&m_stride) StrideBase(StrideType::OuterStrideAtCompileTime==0?0:expr.outerStride(),
                                 StrideType::InnerStrideAtCompileTime==0?0:expr.innerStride());
  }

  StrideBase m_stride;
};
// Writable Ref: binds only to l-value dense expressions whose layout matches
// at compile time; it never evaluates into a temporary.
template<typename PlainObjectType, int Options, typename StrideType> class Ref
  : public RefBase<Ref<PlainObjectType, Options, StrideType> >
{
    typedef internal::traits<Ref> Traits;
  public:
    typedef RefBase<Ref> Base;
    EIGEN_DENSE_PUBLIC_INTERFACE(Ref)

#ifndef EIGEN_PARSED_BY_DOXYGEN
    // Ctor for plain objects (Matrix/Array); SFINAE-enabled only when the
    // layout matches at compile time.
    template<typename Derived>
    inline Ref(PlainObjectBase<Derived>& expr,
               typename internal::enable_if<bool(Traits::template match<Derived>::MatchAtCompileTime),Derived>::type* = 0)
    {
      Base::construct(expr);
    }
    // Ctor for general dense expressions; additionally requires an l-value.
    // The extra int default argument references a constant whose name says it
    // is private in PlainObjectBase — presumably to steer plain objects to the
    // overload above (TODO confirm).
    template<typename Derived>
    inline Ref(const DenseBase<Derived>& expr,
               typename internal::enable_if<bool(internal::is_lvalue<Derived>::value&&bool(Traits::template match<Derived>::MatchAtCompileTime)),Derived>::type* = 0,
               int = Derived::ThisConstantIsPrivateInPlainObjectBase)
#else
    // Simplified signature shown to Doxygen.
    template<typename Derived>
    inline Ref(DenseBase<Derived>& expr)
#endif
    {
      Base::construct(expr.const_cast_derived());
    }

    EIGEN_INHERIT_ASSIGNMENT_OPERATORS(Ref)
};
// this is the const ref version
// Read-only Ref: accepts any dense expression. When the layout does not
// match at compile time (match<Derived>::type is false_type), the expression
// is first evaluated into the owned m_object and the Ref binds to that copy.
template<typename TPlainObjectType, int Options, typename StrideType> class Ref<const TPlainObjectType, Options, StrideType>
  : public RefBase<Ref<const TPlainObjectType, Options, StrideType> >
{
    typedef internal::traits<Ref> Traits;
  public:
    typedef RefBase<Ref> Base;
    EIGEN_DENSE_PUBLIC_INTERFACE(Ref)

    template<typename Derived>
    inline Ref(const DenseBase<Derived>& expr)
    {
//      std::cout << match_helper<Derived>::HasDirectAccess << "," << match_helper<Derived>::OuterStrideMatch << "," << match_helper<Derived>::InnerStrideMatch << "\n";
//      std::cout << int(StrideType::OuterStrideAtCompileTime) << " - " << int(Derived::OuterStrideAtCompileTime) << "\n";
//      std::cout << int(StrideType::InnerStrideAtCompileTime) << " - " << int(Derived::InnerStrideAtCompileTime) << "\n";
      // Tag dispatch on the compile-time layout-match predicate.
      construct(expr.derived(), typename Traits::template match<Derived>::type());
    }

  protected:
    // Layout matches: reference the expression's storage directly.
    template<typename Expression>
    void construct(const Expression& expr,internal::true_type)
    {
      Base::construct(expr);
    }
    // Layout mismatch: evaluate into the owned plain object, then reference it.
    template<typename Expression>
    void construct(const Expression& expr, internal::false_type)
    {
      m_object.lazyAssign(expr);
      Base::construct(m_object);
    }

  protected:
    // Backing storage used only on the mismatch path.
    TPlainObjectType m_object;
};
} // end namespace Eigen
#endif // EIGEN_REF_H
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: c6be551879cd14d739b0188844ef2c60
timeCreated: 1447582131
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: e1e5ef31262d242ce8efe2020a27425e, type: 3}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "watch",
"scale" : "2x",
"screen-width" : "<=145"
},
{
"idiom" : "watch",
"scale" : "2x",
"screen-width" : ">161"
},
{
"idiom" : "watch",
"scale" : "2x",
"screen-width" : ">145"
},
{
"idiom" : "watch",
"scale" : "2x",
"screen-width" : ">183"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
<import src="../../../common/head.wxml"/>
<import src="../../../common/foot.wxml"/>
<view class="container">
<template is="head" data="{{title: 'sendMessage'}}"/>
<view class="page-body">
<view class="weui-cells__title">发送内容(以下字段可自由适配)</view>
<view class="weui-cells weui-cells_after-title">
<view class="weui-cell weui-cell_input">
<view class="weui-cell__hd">
<view class="weui-label">实例字段</view>
</view>
<view class="weui-cell__bd">
<input class="weui-input" type="text" placeholder="请输入"></input>
</view>
</view>
<view class="weui-cell weui-cell_input">
<view class="weui-cell__hd">
<view class="weui-label">实例字段</view>
</view>
<view class="weui-cell__bd">
<input class="weui-input" type="text" placeholder="请输入"></input>
</view>
</view>
</view>
<view class="weui-cells">
<view class="weui-cell weui-cell_input">
<view class="weui-cell__hd">
<view class="weui-label">跳转链接</view>
</view>
<view class="weui-cell__bd">
<input class="weui-input" type="text" placeholder="请输入" value="{{shareData.path}}"></input>
</view>
</view>
</view>
<view class="btn-area">
<button type="primary">发送模板消息</button>
</view>
</view>
<template is="foot"/>
</view>
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2005-2006 The Trustees of Indiana University.
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Authors: Douglas Gregor
// Andrew Lumsdaine
#ifndef BOOST_GRAPH_DETAIL_REMOTE_UPDATE_SET_HPP
#define BOOST_GRAPH_DETAIL_REMOTE_UPDATE_SET_HPP
#ifndef BOOST_GRAPH_USE_MPI
#error "Parallel BGL files should not be included unless <boost/graph/use_mpi.hpp> has been included"
#endif
#include <boost/graph/parallel/process_group.hpp>
#include <boost/type_traits/is_convertible.hpp>
#include <vector>
#include <boost/assert.hpp>
#include <boost/optional.hpp>
#include <queue>
namespace boost { namespace graph { namespace detail {
// Helper that performs an ADL-friendly call to synchronize() on the process
// group, with boost::parallel::synchronize as the fallback overload.
template<typename ProcessGroup>
void do_synchronize(ProcessGroup& pg)
{
  using boost::parallel::synchronize;
  synchronize(pg);
}
// Tag types selecting how remote updates are delivered.
struct remote_set_queued {};
struct remote_set_immediate {};

// Chooses the delivery semantics from the process group's communication
// category: BSP-style groups batch updates until synchronization (queued);
// anything else sends each update as it happens (immediate).
template<typename ProcessGroup>
class remote_set_semantics
{
  BOOST_STATIC_CONSTANT
    (bool,
     queued = (is_convertible<
                 typename ProcessGroup::communication_category,
                 boost::parallel::bsp_process_group_tag>::value));

public:
  typedef typename mpl::if_c<queued,
                             remote_set_queued,
                             remote_set_immediate>::type type;
};
template<typename Derived, typename ProcessGroup, typename Value,
typename OwnerMap,
typename Semantics = typename remote_set_semantics<ProcessGroup>::type>
class remote_update_set;
/**********************************************************************
* Remote updating set that queues messages until synchronization *
**********************************************************************/
template<typename Derived, typename ProcessGroup, typename Value,
         typename OwnerMap>
class remote_update_set<Derived, ProcessGroup, Value, OwnerMap,
                        remote_set_queued>
{
  typedef typename property_traits<OwnerMap>::key_type Key;
  typedef std::vector<std::pair<Key, Value> > Updates;
  typedef typename Updates::size_type updates_size_type;
  typedef typename Updates::value_type updates_pair_type;
public:
private:
  typedef typename ProcessGroup::process_id_type process_id_type;

  enum message_kind {
    /** Message containing the number of updates that will be sent in
     * a msg_updates message that will immediately follow. This
     * message will contain a single value of type
     * updates_size_type.
     */
    msg_num_updates,

    /** Contains (key, value) pairs with all of the updates from a
     * particular source. The number of updates is variable, but will
     * be provided in a msg_num_updates message that immediately
     * precedes this message.
     *
     */
    msg_updates
  };

  // Trigger invoked by the process group on message arrival. Pairs each
  // msg_num_updates size announcement with the msg_updates payload that
  // follows it from the same source.
  struct handle_messages
  {
    explicit
    handle_messages(remote_update_set* self, const ProcessGroup& pg)
      : self(self), update_sizes(num_processes(pg), 0) { }

    void operator()(process_id_type source, int tag)
    {
      switch(tag) {
      case msg_num_updates:
        {
          // Receive the # of updates
          updates_size_type num_updates;
          receive(self->process_group, source, tag, num_updates);
          update_sizes[source] = num_updates;
        }
        break;

      case msg_updates:
        {
          // Size announced by the preceding msg_num_updates from this source.
          updates_size_type num_updates = update_sizes[source];
          BOOST_ASSERT(num_updates);

          // Receive the actual updates
          std::vector<updates_pair_type> updates(num_updates);
          receive(self->process_group, source, msg_updates, &updates[0],
                  num_updates);

          // Send updates to derived "receive_update" member
          Derived* derived = static_cast<Derived*>(self);
          for (updates_size_type u = 0; u < num_updates; ++u)
            derived->receive_update(source, updates[u].first, updates[u].second);

          update_sizes[source] = 0;
        }
        break;
      };
    }

  private:
    remote_update_set* self;
    // Per-source count carried over from the last msg_num_updates message.
    std::vector<updates_size_type> update_sizes;
  };
  friend struct handle_messages;

protected:
  remote_update_set(const ProcessGroup& pg, const OwnerMap& owner)
    : process_group(pg, handle_messages(this, pg)),
      updates(num_processes(pg)), owner(owner) {
  }

  // Apply an update for a locally-owned key immediately via the derived
  // class; queue updates for remote keys until synchronize().
  void update(const Key& key, const Value& value)
  {
    if (get(owner, key) == process_id(process_group)) {
      Derived* derived = static_cast<Derived*>(this);
      derived->receive_update(get(owner, key), key, value);
    }
    else {
      updates[get(owner, key)].push_back(std::make_pair(key, value));
    }
  }

  // Nothing to do between synchronizations in the queued (BSP) model.
  void collect() { }

  // Flush every per-process queue to its owner, then synchronize the group
  // (which is when the queued messages are delivered and handled).
  void synchronize()
  {
    // Emit all updates and then remove them
    process_id_type num_processes = updates.size();
    for (process_id_type p = 0; p < num_processes; ++p) {
      if (!updates[p].empty()) {
        send(process_group, p, msg_num_updates, updates[p].size());
        send(process_group, p, msg_updates,
             &updates[p].front(), updates[p].size());
        updates[p].clear();
      }
    }

    do_synchronize(process_group);
  }

  ProcessGroup process_group;

private:
  // Outgoing update queues, one per process in the group.
  std::vector<Updates> updates;
  OwnerMap owner;
};
/**********************************************************************
* Remote updating set that sends messages immediately *
**********************************************************************/
template<typename Derived, typename ProcessGroup, typename Value,
         typename OwnerMap>
class remote_update_set<Derived, ProcessGroup, Value, OwnerMap,
                        remote_set_immediate>
{
  typedef typename property_traits<OwnerMap>::key_type Key;
  typedef std::pair<Key, Value> update_pair_type;
  typedef typename std::vector<update_pair_type>::size_type updates_size_type;
public:
  typedef typename ProcessGroup::process_id_type process_id_type;
private:
  enum message_kind {
    /** Contains a (key, value) pair that will be updated. */
    msg_update
  };

  // Trigger invoked on message arrival: receives a single (key, value) pair
  // and forwards it to the derived class's receive_update().
  struct handle_messages
  {
    explicit handle_messages(remote_update_set* self, const ProcessGroup& pg)
      : self(self)
    { update_sizes.resize(num_processes(pg), 0); }

    void operator()(process_id_type source, int tag)
    {
      // Receive a single (key, value) update
      BOOST_ASSERT(tag == msg_update);
      update_pair_type update;
      receive(self->process_group, source, tag, update);

      // Send update to derived "receive_update" member
      Derived* derived = static_cast<Derived*>(self);
      derived->receive_update(source, update.first, update.second);
    }

  private:
    // NOTE(review): update_sizes is sized in the constructor but never read
    // afterwards — looks vestigial (copied from the queued variant); confirm
    // before removing.
    std::vector<updates_size_type> update_sizes;
    remote_update_set* self;
  };
  friend struct handle_messages;

protected:
  remote_update_set(const ProcessGroup& pg, const OwnerMap& owner)
    : process_group(pg, handle_messages(this, pg)), owner(owner) { }

  // Apply a local update directly; send a remote update to its owner
  // immediately (no queueing).
  void update(const Key& key, const Value& value)
  {
    if (get(owner, key) == process_id(process_group)) {
      Derived* derived = static_cast<Derived*>(this);
      derived->receive_update(get(owner, key), key, value);
    }
    else
      send(process_group, get(owner, key), msg_update,
           update_pair_type(key, value));
  }

  // Drain any pending msg_update messages without a full synchronization.
  void collect()
  {
    typedef std::pair<process_id_type, int> probe_type;
    handle_messages handler(this, process_group);
    while (optional<probe_type> stp = probe(process_group))
      if (stp->second == msg_update) handler(stp->first, stp->second);
  }

  void synchronize()
  {
    do_synchronize(process_group);
  }

  ProcessGroup process_group;
  OwnerMap owner;
};
} } } // end namespace boost::graph::detail
#endif // BOOST_GRAPH_DETAIL_REMOTE_UPDATE_SET_HPP
| {
"pile_set_name": "Github"
} |
<a href="https://www.buymeacoffee.com/7eDr4fv" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/lato-orange.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;" ></a>
# 2019-ncov-frontend
> Coronavirus (COVID-19) Frontend
Backend setup can be found here [2019-ncov-api](https://github.com/sorxrob/2019-ncov-api).
## Project setup
```
npm install
```
### Compiles and hot-reloads for development
```
npm run serve
```
### Compiles and minifies for production
```
npm run build
```
### Lints and fixes files
```
npm run lint
```
## License & copyright
© Robert C Soriano
Licensed under the [MIT License](LICENSE).
## Acknowledgments
- Hat tip to anyone whose module was used
- Richard Matsen for radius scale calculation
| {
"pile_set_name": "Github"
} |
type=driver
plumed_needs=boost_serialization
plumed_modules=drr
arg="--plumed plumed.dat --trajectory-stride 1 --timestep 0.005 --ixyz ala12_trajectory.xyz --dump-forces forces --dump-forces-fmt=%10.6f"
| {
"pile_set_name": "Github"
} |
Lets sing!
♫♪♬♩
Eat food
🍅🍕
| {
"pile_set_name": "Github"
} |
import sqlite3
import time
import datetime
conn = sqlite3.connect('master.db')
c = conn.cursor()
def create_table():
    """Create the ``tennis`` odds table if it does not already exist.

    Columns: ``player`` (TEXT) plus one REAL odds column per bookmaker:
    Pinnacle, WillHill, betThreeSixFive, Bookmaker, BetOnline,
    TheGreekSportsbook, JustBet, SportsInteraction, WagerWeb, FiveDimes.
    (The previous free-floating comment listed betfair/ladbrokes columns
    that do not exist in this schema.)
    """
    c.execute('CREATE TABLE IF NOT EXISTS tennis(player TEXT, Pinnacle REAL, WillHill REAL, betThreeSixFive REAL, Bookmaker REAL, BetOnline REAL, TheGreekSportsbook REAL, JustBet REAL, SportsInteraction REAL, WagerWeb REAL, FiveDimes REAL)')
    # Make the DDL durable immediately, matching the commit style of the
    # other helpers in this module.
    conn.commit()
def dynamic_data_entry(column, entry):
    """Insert a new row with only `column` populated.

    SQLite cannot bind identifiers, so the column name is interpolated into
    the SQL; to keep that safe it is validated against the known schema
    first. The value itself is bound as a parameter.

    Raises:
        ValueError: if `column` is not a column of the `tennis` table.
    """
    allowed = {'player', 'Pinnacle', 'WillHill', 'betThreeSixFive',
               'Bookmaker', 'BetOnline', 'TheGreekSportsbook', 'JustBet',
               'SportsInteraction', 'WagerWeb', 'FiveDimes'}
    if column not in allowed:
        raise ValueError('unknown column: %r' % (column,))
    c.execute('INSERT INTO tennis(' + column + ') VALUES(?)',
              (str(entry),))
    conn.commit()
def update(player, column, entry):
    """Set `column` to `entry` on the row whose player equals `player`.

    The original interpolated both the value and the player name directly
    into the SQL string, which broke for string values (no quoting) and was
    open to SQL injection; both are now bound parameters. The column name
    cannot be bound, so it is validated against the known schema instead.
    The previous no-op ``SELECT * FROM tennis`` has been removed.

    Raises:
        ValueError: if `column` is not a column of the `tennis` table.
    """
    allowed = {'Pinnacle', 'WillHill', 'betThreeSixFive', 'Bookmaker',
               'BetOnline', 'TheGreekSportsbook', 'JustBet',
               'SportsInteraction', 'WagerWeb', 'FiveDimes'}
    if column not in allowed:
        raise ValueError('unknown column: %r' % (column,))
    c.execute('UPDATE tennis SET ' + column + ' = ? WHERE player = ?',
              (entry, player))
    conn.commit()
def read_from_db(player):
    """Return all rows for `player` as a list of tuples.

    Binds `player` as a SQL parameter instead of concatenating it into the
    query string (the original was open to SQL injection). Returns an empty
    list when the player is unknown.
    """
    c.execute('SELECT * FROM tennis WHERE player = ?', (player,))
    # fetchall() already returns a list of row tuples.
    return c.fetchall()
create_table()
#c.close()
#conn.close()
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Dapper" version="1.50.4-alpha1-00070" targetFramework="net452" />
<package id="Dapper.Contrib" version="1.50.0" targetFramework="net452" />
<package id="Dapper.Extension" version="1.0.0.1" targetFramework="net452" />
<package id="EntityFramework" version="6.1.3" targetFramework="net452" />
<package id="SyntacticSugar" version="2.4.1" targetFramework="net452" />
</packages> | {
"pile_set_name": "Github"
} |
package com.tencent.mm.ui.chatting;
import android.view.View;
import android.view.ViewStub;
import android.view.animation.AnimationUtils;
import android.widget.ListView;
import com.tencent.mm.e.a.nq;
import com.tencent.mm.plugin.sight.encode.ui.ChattingSightContainerView.a;
import com.tencent.mm.sdk.c.a;
import com.tencent.mm.sdk.platformtools.ac;
import com.tencent.mm.ui.j;
import com.tencent.mm.ui.o;
final class ChattingUI$a$84$2
implements ChattingSightContainerView.a
{
View lBB = null;
ChattingUI$a$84$2(ChattingUI.a.84 param84) {}
public final void azd()
{
nq localnq = new nq();
avS.type = 6;
a.kug.y(localnq);
lBA.lAY.setRequestedOrientation(1);
lBA.lAY.Xk();
lBA.lAY.bkT();
lBA.lAY.blj();
if (lBB == null) {
lBB = ((ViewStub)lBA.lAY.findViewById(2131755932)).inflate();
}
lBB.setVisibility(0);
lBB.startAnimation(AnimationUtils.loadAnimation(lBA.lAY.kNN.kOg, 2130968612));
}
public final void onHide()
{
lBA.lAY.setRequestedOrientation(-1);
lBA.lAY.bkT();
if ((lBB != null) && (lBB.getVisibility() == 0))
{
lBB.setVisibility(8);
lBB.startAnimation(AnimationUtils.loadAnimation(lBA.lAY.kNN.kOg, 2130968613));
}
new ac().post(new Runnable()
{
public final void run()
{
nq localnq = new nq();
avS.type = 7;
avS.avT = ChattingUI.a.e(lBA.lAY).getFirstVisiblePosition();
avS.avU = ChattingUI.a.e(lBA.lAY).getLastVisiblePosition();
avS.avV = ChattingUI.a.e(lBA.lAY).getHeaderViewsCount();
a.kug.y(localnq);
}
});
}
}
/* Location:
* Qualified Name: com.tencent.mm.ui.chatting.ChattingUI.a.84.2
* Java Class Version: 6 (50.0)
* JD-Core Version: 0.7.1
*/ | {
"pile_set_name": "Github"
} |
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* May 28, 2008.
*
* The Initial Developer of the Original Code is
* Brendan Eich <brendan@mozilla.org>
*
* Contributor(s):
* David Anderson <danderson@mozilla.com>
* David Mandelin <dmandelin@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#if !defined jsjaeger_methodjit_inl_h__ && defined JS_METHODJIT
#define jsjaeger_methodjit_inl_h__
namespace js {
namespace mjit {
enum CompileRequest
{
CompileRequest_Interpreter,
CompileRequest_JIT
};
/* Number of times a script must be called before we run it in the methodjit. */
static const size_t CALLS_BEFORE_COMPILE = 16;
/* Number of loop back-edges we execute in the interpreter before methodjitting. */
static const size_t BACKEDGES_BEFORE_COMPILE = 16;
// Decide whether |script| may run under the method JIT right now.
// Returns Compile_Okay when a compiled version exists (or TryCompile just
// produced one), Compile_Skipped while the script is still warming up in
// the interpreter, and Compile_Abort when method-jitting is off or the
// script's JIT status is Invalid.
static inline CompileStatus
CanMethodJIT(JSContext *cx, JSScript *script, JSStackFrame *fp, CompileRequest request)
{
    if (!cx->methodJitEnabled)
        return Compile_Abort;
    // JIT status is tracked separately per constructing/non-constructing entry.
    JITScriptStatus status = script->getJITStatus(fp->isConstructing());
    if (status == JITScript_Invalid)
        return Compile_Abort;
    // When entering from the interpreter, keep interpreting until the call
    // count exceeds CALLS_BEFORE_COMPILE, unless the "always JIT" option is
    // set. Note incCallCount() bumps the counter as a side effect.
    if (request == CompileRequest_Interpreter &&
        status == JITScript_None &&
        !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
        script->incCallCount() <= CALLS_BEFORE_COMPILE)
    {
        return Compile_Skipped;
    }
    if (status == JITScript_None)
        return TryCompile(cx, fp);
    return Compile_Okay;
}
/*
 * Called from a backedge in the interpreter to decide if we should transition to the
 * methodjit. If so, we compile the given function.
 */
static inline CompileStatus
CanMethodJITAtBranch(JSContext *cx, JSScript *script, JSStackFrame *fp, jsbytecode *pc)
{
    if (!cx->methodJitEnabled)
        return Compile_Abort;
    JITScriptStatus status = script->getJITStatus(fp->isConstructing());
    if (status == JITScript_Invalid)
        return Compile_Abort;
    // Unlike CanMethodJIT, warm-up here is counted per back-edge bytecode
    // (compartment-wide), not per call; the increment is a side effect of
    // the condition.
    if (status == JITScript_None &&
        !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
        cx->compartment->incBackEdgeCount(pc) <= BACKEDGES_BEFORE_COMPILE)
    {
        return Compile_Skipped;
    }
    if (status == JITScript_None)
        return TryCompile(cx, fp);
    return Compile_Okay;
}
}
}
#endif
| {
"pile_set_name": "Github"
} |
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
)
// RegistrationViaAPIResponse The Response for Registration Flows via API
//
// swagger:model registrationViaApiResponse
type RegistrationViaAPIResponse struct {
// identity
// Required: true
Identity *Identity `json:"identity"`
// session
Session *Session `json:"session,omitempty"`
// The Session Token
//
// This field is only set when the session hook is configured as a post-registration hook.
//
// A session token is equivalent to a session cookie, but it can be sent in the HTTP Authorization
// Header:
//
// Authorization: bearer ${session-token}
//
// The session token is only issued for API flows, not for Browser flows!
// Required: true
SessionToken *string `json:"session_token"`
}
// Validate validates this registration via Api response
func (m *RegistrationViaAPIResponse) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateIdentity(formats); err != nil {
res = append(res, err)
}
if err := m.validateSession(formats); err != nil {
res = append(res, err)
}
if err := m.validateSessionToken(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *RegistrationViaAPIResponse) validateIdentity(formats strfmt.Registry) error {
if err := validate.Required("identity", "body", m.Identity); err != nil {
return err
}
if m.Identity != nil {
if err := m.Identity.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("identity")
}
return err
}
}
return nil
}
func (m *RegistrationViaAPIResponse) validateSession(formats strfmt.Registry) error {
if swag.IsZero(m.Session) { // not required
return nil
}
if m.Session != nil {
if err := m.Session.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("session")
}
return err
}
}
return nil
}
func (m *RegistrationViaAPIResponse) validateSessionToken(formats strfmt.Registry) error {
if err := validate.Required("session_token", "body", m.SessionToken); err != nil {
return err
}
return nil
}
// MarshalBinary interface implementation
func (m *RegistrationViaAPIResponse) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *RegistrationViaAPIResponse) UnmarshalBinary(b []byte) error {
var res RegistrationViaAPIResponse
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}
| {
"pile_set_name": "Github"
} |
s [ ]
w [a-z0-9A-Z]
W [^a-z0-9A-Z]
d [0-9]
%%
((MERGE.*USING{s}*\()|(EXECUTE{s}*IMMEDIATE{s}*\")|({W}+{d}{s}+HAVING{s}+{d})|(MATCH{s}*[a-zA-Z\\(\\),+\-]+{s}*AGAINST{s}*\()) printf("attack detected");
%%
| {
"pile_set_name": "Github"
} |
PREFIX dc: <http://purl.org/dc/elements/1.1/>
PREFIX ns: <http://example.org/ns#>
# For each item, compute the discounted price p*(1-discount) and return
# title/price pairs for items whose discounted price is below 20.
SELECT ?title ?price
{ ?x ns:price ?p .
  ?x ns:discount ?discount
  # BIND is placed after ?p and ?discount are bound within the group,
  # so ?price is defined when the FILTER evaluates it.
  BIND (?p*(1-?discount) AS ?price)
  FILTER(?price < 20)
  ?x dc:title ?title .
}
| {
"pile_set_name": "Github"
} |
package volumes

// Compile-time assertion that *LinuxResizeService satisfies the
// ResizeService interface; fails the build if the implementation drifts.
var _ ResizeService = (*LinuxResizeService)(nil)
| {
"pile_set_name": "Github"
} |
/*
* TupleTypeUtil.java
*
* This source file is part of the FoundationDB open source project
*
* Copyright 2015-2019 Apple Inc. and the FoundationDB project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.apple.foundationdb.record.metadata;
import com.apple.foundationdb.record.provider.foundationdb.FDBRecordVersion;
import com.apple.foundationdb.tuple.Tuple;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.ProtocolMessageEnum;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
/**
* Utility class for dealing with {@link Tuple} types. In theory, these methods should live in
* {@link com.apple.foundationdb.tuple.TupleHelpers TupleHelpers} except that they use some Protobuf specific things
* like the {@link ByteString} class, and {@code TupleHelpers} is defined in the
* <a href="https://javadoc.io/doc/org.foundationdb/fdb-extensions/">fdb-extensions</a> sub-project
* which does not (and probably should not) take Protobuf as a dependency.
*/
class TupleTypeUtil {
@Nonnull
private static final BigInteger BIG_INT_MAX_LONG = BigInteger.valueOf(Long.MAX_VALUE);
@Nonnull
private static final BigInteger BIG_INT_MIN_LONG = BigInteger.valueOf(Long.MIN_VALUE);
/**
* Normalize a list of values so that it can be checked for equality with other lists sharing
* the same {@link Tuple} representation. In other words, it should be the case that:
*
* <pre> {@code
* toTupleEquivalentValue(list1).equals(toTupleEquivalentValue)
* == Arrays.equals(Tuple.fromList(toTupleAppropriateList(list1)).pack(), Tuple.fromList(toTupleAppropriateList(list2)).pack())
* }</pre>
*
* <p>
* for any two lists {@code list1} and {@code list2}.
* </p>
*
* @param values the list of values to normalized
* @return a new list containing the normalized elements of {@code values}
*/
@Nonnull
static List<Object> toTupleEquivalentList(@Nonnull List<?> values) {
    // Normalize each element in order; result has the same length as the input.
    final List<Object> normalized = new ArrayList<>(values.size());
    for (int i = 0; i < values.size(); i++) {
        normalized.add(toTupleEquivalentValue(values.get(i)));
    }
    return normalized;
}
/**
* Normalize a value so that it compares equal to anything with the same {@link Tuple} representation.
* The value that is returned cannot necessarily be packed by a {@code Tuple} (for example,
* a <code>byte[]</code> is returned as a {@link ByteString}), but it does implement {@link Object#equals(Object)}
* and {@link Object#hashCode()}, so the value can be used in hash-based data structures like
* {@link java.util.HashSet HashSet}s and {@link java.util.HashMap HashMap}s. In other words, it should
* bethe case that:
*
* <pre> {@code
* Objects.equals(toTupleEquivalentValue(value1), toTupleEquivalentValue(value2))
* == Arrays.equals(Tuple.from(value1).pack(), Tuple.from(value2).pack())
* }</pre>
*
* <p>
* for any two values {@code value1} and {@code value2}.
* </p>
*
* <p>
* This will only return {@code null} if {@link #toTupleAppropriateValue(Object)} would return {@code null}
* on the same input. If the object is already in
* </p>
*
* @param obj the value to normalize
* @return a value that has the same representation when {@link Tuple}-encoded
*/
@Nullable
static Object toTupleEquivalentValue(@Nullable Object obj) {
if (obj == null || obj instanceof Key.Evaluated.NullStandin) {
return null;
} else if (obj instanceof List<?>) {
List<?> list = (List<?>)obj;
return toTupleEquivalentList(list);
} else if (obj instanceof Tuple) {
return toTupleEquivalentList(((Tuple)obj).getItems());
} else if (obj instanceof byte[]) {
return ByteString.copyFrom((byte[]) obj);
} else if ((obj instanceof Byte) || (obj instanceof Short) || (obj instanceof Integer)) {
return ((Number)obj).longValue();
} else if (obj instanceof BigInteger) {
BigInteger bigInt = (BigInteger)obj;
if (bigInt.compareTo(BIG_INT_MIN_LONG) > 0 && bigInt.compareTo(BIG_INT_MAX_LONG) < 0) {
return bigInt.longValue();
} else {
return bigInt;
}
} else if (obj instanceof ProtocolMessageEnum) {
return (long)((ProtocolMessageEnum)obj).getNumber();
} else if (obj instanceof Descriptors.EnumValueDescriptor) {
return (long)((Descriptors.EnumValueDescriptor)obj).getNumber();
} else if (obj instanceof FDBRecordVersion) {
return ((FDBRecordVersion)obj).toVersionstamp(false);
} else {
return obj;
}
}
/**
* Convert a list of values into items that can all be stored within a {@link Tuple}.
*
* @param values a list of values
* @return a new list with {@link Tuple}-encodable versions of the elements of {@code values}
*/
@Nonnull
static List<Object> toTupleAppropriateList(@Nonnull List<?> values) {
List<Object> tupleAppropriateList = new ArrayList<>(values.size());
for (Object o : values) {
tupleAppropriateList.add(toTupleAppropriateValue(o));
}
return tupleAppropriateList;
}
/**
* Convert a value into a type that can be stored within a {@link Tuple}.
*
* @param obj the value to convert
* @return the value converted to some {@link Tuple}-encodable type
*/
@Nullable
static Object toTupleAppropriateValue(@Nullable Object obj) {
if (obj instanceof Key.Evaluated.NullStandin) {
return null;
} else if (obj instanceof ByteString) {
return ((ByteString) obj).toByteArray();
} else if (obj instanceof List) {
return toTupleAppropriateList((List<?>) obj);
// Following two are both Internal.EnumLite, so could use that, too.
} else if (obj instanceof ProtocolMessageEnum) {
return ((ProtocolMessageEnum) obj).getNumber();
} else if (obj instanceof Descriptors.EnumValueDescriptor) {
return ((Descriptors.EnumValueDescriptor) obj).getNumber();
} else if (obj instanceof FDBRecordVersion) {
return ((FDBRecordVersion) obj).toVersionstamp(false);
} else {
return obj;
}
}
private TupleTypeUtil() {
}
}
| {
"pile_set_name": "Github"
} |
---
"Missing document with catch":
- do:
catch: missing
get:
index: test_1
id: 1
---
"Missing document with ignore":
- do:
get:
index: test_1
id: 1
ignore: 404
| {
"pile_set_name": "Github"
} |
# coding=utf-8
import typing
from pyramid.config import Configurator
import transaction
from tracim_backend.app_models.contents import FOLDER_TYPE
from tracim_backend.app_models.contents import content_type_list
from tracim_backend.config import CFG
from tracim_backend.exceptions import ContentFilenameAlreadyUsedInFolder
from tracim_backend.exceptions import EmptyLabelNotAllowed
from tracim_backend.extensions import hapic
from tracim_backend.lib.core.content import ContentApi
from tracim_backend.lib.utils.authorization import ContentTypeChecker
from tracim_backend.lib.utils.authorization import check_right
from tracim_backend.lib.utils.authorization import is_contributor
from tracim_backend.lib.utils.authorization import is_reader
from tracim_backend.lib.utils.request import TracimRequest
from tracim_backend.lib.utils.utils import generate_documentation_swagger_tag
from tracim_backend.models.context_models import ContentInContext
from tracim_backend.models.context_models import RevisionInContext
from tracim_backend.models.revision_protection import new_revision
from tracim_backend.views.controllers import Controller
from tracim_backend.views.core_api.schemas import FolderContentModifySchema
from tracim_backend.views.core_api.schemas import NoContentSchema
from tracim_backend.views.core_api.schemas import SetContentStatusSchema
from tracim_backend.views.core_api.schemas import TextBasedContentSchema
from tracim_backend.views.core_api.schemas import TextBasedRevisionSchema
from tracim_backend.views.core_api.schemas import WorkspaceAndContentIdPathSchema
from tracim_backend.views.swagger_generic_section import SWAGGER_TAG__CONTENT_ENDPOINTS
try: # Python 3.5+
from http import HTTPStatus
except ImportError:
from http import client as HTTPStatus
SWAGGER_TAG__CONTENT_FOLDER_SECTION = "Folders"
SWAGGER_TAG__CONTENT_FOLDER_ENDPOINTS = generate_documentation_swagger_tag(
SWAGGER_TAG__CONTENT_ENDPOINTS, SWAGGER_TAG__CONTENT_FOLDER_SECTION
)
is_folder_content = ContentTypeChecker([FOLDER_TYPE])
class FolderController(Controller):
    # REST controller for the "folder" content type.  Each handler resolves the
    # target content from the path, wraps it in a ContentApi, and returns a
    # context model that hapic serializes with the declared output schema.
    # NOTE: the short method docstrings below are surfaced by hapic as the
    # endpoint descriptions in the generated API documentation, so they are
    # deliberately left unchanged.

    @hapic.with_api_doc(tags=[SWAGGER_TAG__CONTENT_FOLDER_ENDPOINTS])
    @check_right(is_reader)
    @check_right(is_folder_content)
    @hapic.input_path(WorkspaceAndContentIdPathSchema())
    @hapic.output_body(TextBasedContentSchema())
    def get_folder(self, context, request: TracimRequest, hapic_data=None) -> ContentInContext:
        """
        Get folder info
        """
        app_config = request.registry.settings["CFG"]  # type: CFG
        # show_archived/show_deleted=True so the endpoint still resolves folders
        # that have been archived or deleted.
        api = ContentApi(
            show_archived=True,
            show_deleted=True,
            current_user=request.current_user,
            session=request.dbsession,
            config=app_config,
        )
        # Any_SLUG: lookup by id only; the is_folder_content checker has already
        # enforced that the content is a folder.
        content = api.get_one(hapic_data.path.content_id, content_type=content_type_list.Any_SLUG)
        return api.get_content_in_context(content)

    @hapic.with_api_doc(tags=[SWAGGER_TAG__CONTENT_FOLDER_ENDPOINTS])
    @hapic.handle_exception(EmptyLabelNotAllowed, HTTPStatus.BAD_REQUEST)
    @hapic.handle_exception(ContentFilenameAlreadyUsedInFolder, HTTPStatus.BAD_REQUEST)
    @check_right(is_contributor)
    @check_right(is_folder_content)
    @hapic.input_path(WorkspaceAndContentIdPathSchema())
    @hapic.input_body(FolderContentModifySchema())
    @hapic.output_body(TextBasedContentSchema())
    def update_folder(self, context, request: TracimRequest, hapic_data=None) -> ContentInContext:
        """
        update folder
        """
        app_config = request.registry.settings["CFG"]  # type: CFG
        api = ContentApi(
            show_archived=True,
            show_deleted=True,
            current_user=request.current_user,
            session=request.dbsession,
            config=app_config,
        )
        content = api.get_one(hapic_data.path.content_id, content_type=content_type_list.Any_SLUG)
        # All modifications happen inside new_revision() so they are recorded as
        # a single new revision of the content.
        with new_revision(session=request.dbsession, tm=transaction.manager, content=content):
            api.update_container_content(
                item=content,
                new_label=hapic_data.body.label,
                new_content=hapic_data.body.raw_content,
                allowed_content_type_slug_list=hapic_data.body.sub_content_types,
            )
            api.save(content)
        # Post-update hooks (runs after the revision context is closed).
        api.execute_update_content_actions(content)
        return api.get_content_in_context(content)

    @hapic.with_api_doc(tags=[SWAGGER_TAG__CONTENT_FOLDER_ENDPOINTS])
    @check_right(is_reader)
    @check_right(is_folder_content)
    @hapic.input_path(WorkspaceAndContentIdPathSchema())
    @hapic.output_body(TextBasedRevisionSchema(many=True))
    def get_folder_revisions(
        self, context, request: TracimRequest, hapic_data=None
    ) -> typing.List[RevisionInContext]:
        """
        get folder revisions
        """
        app_config = request.registry.settings["CFG"]  # type: CFG
        api = ContentApi(
            show_archived=True,
            show_deleted=True,
            current_user=request.current_user,
            session=request.dbsession,
            config=app_config,
        )
        content = api.get_one(hapic_data.path.content_id, content_type=content_type_list.Any_SLUG)
        revisions = content.revisions
        return [api.get_revision_in_context(revision) for revision in revisions]

    @hapic.with_api_doc(tags=[SWAGGER_TAG__CONTENT_FOLDER_ENDPOINTS])
    @check_right(is_contributor)
    @check_right(is_folder_content)
    @hapic.input_path(WorkspaceAndContentIdPathSchema())
    @hapic.input_body(SetContentStatusSchema())
    @hapic.output_body(NoContentSchema(), default_http_code=HTTPStatus.NO_CONTENT)
    def set_folder_status(self, context, request: TracimRequest, hapic_data=None) -> None:
        """
        set folder status
        """
        app_config = request.registry.settings["CFG"]  # type: CFG
        api = ContentApi(
            show_archived=True,
            show_deleted=True,
            current_user=request.current_user,
            session=request.dbsession,
            config=app_config,
        )
        content = api.get_one(hapic_data.path.content_id, content_type=content_type_list.Any_SLUG)
        with new_revision(session=request.dbsession, tm=transaction.manager, content=content):
            api.set_status(content, hapic_data.body.status)
            api.save(content)
        api.execute_update_content_actions(content)
        return

    def bind(self, configurator: Configurator) -> None:
        # Register all folder routes/views on the Pyramid configurator.
        # Get folder
        configurator.add_route(
            "folder", "/workspaces/{workspace_id}/folders/{content_id}", request_method="GET"
        )
        configurator.add_view(self.get_folder, route_name="folder")
        # update folder
        configurator.add_route(
            "update_folder", "/workspaces/{workspace_id}/folders/{content_id}", request_method="PUT"
        )
        configurator.add_view(self.update_folder, route_name="update_folder")
        # get folder revisions
        configurator.add_route(
            "folder_revisions",
            "/workspaces/{workspace_id}/folders/{content_id}/revisions",
            request_method="GET",
        )
        configurator.add_view(self.get_folder_revisions, route_name="folder_revisions")
        # set folder status (comment was a stale copy of "get folder revisions")
        configurator.add_route(
            "set_folder_status",
            "/workspaces/{workspace_id}/folders/{content_id}/status",
            request_method="PUT",
        )
        configurator.add_view(self.set_folder_status, route_name="set_folder_status")
| {
"pile_set_name": "Github"
} |
#coding=utf-8
'''
Created on 2015-11-4
@author: zhangtiande
'''
from django.shortcuts import HttpResponse
from teamvision.project.models import Project,Tag
from django.contrib.auth.models import User
from business.ucenter.account_service import AccountService
class VM_AdminUser(object):
    """View model wrapping a Django ``User`` for the admin user-management UI.

    Exposes template-friendly accessors (display name, avatar URL, CSS classes)
    and precomputes which role checkbox should be checked.
    """

    def __init__(self, user, is_create=False):
        # user: the wrapped django.contrib.auth.models.User instance.
        # is_create: True when the view model backs the "create user" form
        # rather than the "edit user" form (see form_id()).
        self.user = user
        self.is_create = is_create
        # Exactly one of the three role markers below is set to "checked"
        # by set_user_group(); the others stay empty strings.
        self.admin = ""
        self.manager = ""
        self.default_group = ""
        self.set_user_group()

    def user_active(self):
        # Returns the icon CSS classes for the "active" checkbox:
        # checked style for active users, empty-square style otherwise.
        result = "finished-check fa-check-square"
        if not self.user.is_active:
            result = "fa-square-o unfinished-check"
        return result

    def user_name(self):
        # The email address doubles as the login/display name here.
        return self.user.email

    def user_full_name(self):
        # Falls back to username when either name part is missing.
        # NOTE(review): last_name + first_name ordering suggests CJK name
        # conventions — confirm before changing.
        result = self.user.username
        if self.user.last_name and self.user.first_name:
            result = self.user.last_name + self.user.first_name
        return result

    def user_avatar(self):
        # Default fruit avatar unless the user has extended profile info.
        result = "/static/global/images/fruit-avatar/Fruit-1.png"
        if self.user.extend_info:
            result = AccountService.get_avatar_url(self.user)
        return result

    def user_groups(self):
        return self.user.groups.all()

    def form_id(self):
        # Distinct DOM ids so create and edit forms can coexist on one page.
        result = "user_edit_form"
        if self.is_create:
            result = "user_create_form"
        return result

    def set_user_group(self):
        # Marks the role checkbox matching the user's group membership.
        # NOTE(review): group ids 27 and 28 are hard-coded — presumably the
        # "admin" and "manager" groups in this deployment's auth_group table;
        # confirm against fixtures before reuse.
        if self.user:
            if self.user.groups.all().filter(id=27):
                self.admin = "checked"
            elif self.user.groups.all().filter(id=28):
                self.manager = "checked"
            else:
                self.default_group = "checked"
| {
"pile_set_name": "Github"
} |
/** @file
Intel Processor Power Management ACPI Code.
Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#include "CpuPowerMgmt.h"
DefinitionBlock (
  "CPU0PSD.aml",    // AML output file name
  "SSDT",           // Table signature (Secondary System Description Table)
  0x02,             // Compliance revision (2: 64-bit integer arithmetic)
  "PmRef",          // OEM ID
  "Cpu0Psd",        // OEM table ID
  0x3000            // OEM revision
  )
{
  // Objects supplied by other tables (DSDT / companion SSDTs).
  External(\PC00, IntObj)           // Capability DWORD; bit 11 (0x0800) tested below for HW P-state coordination
  External(\TCNT, FieldUnitObj)     // Thread/processor count
  External(\_SB.CFGD, FieldUnitObj) // Platform feature configuration flags
  External(\_SB.PR00, DeviceObj)    // Processor 0 device

  Scope(\_SB.PR00)
  {
    Name(HPSD,Package() // HW_ALL
    {
      Package() {5,     // NumEntries. Current Value is 5.
        0,              // Revision. Current Value is 0.
        0,              // Domain.
        0xFE,           // Coordination type 0xFE = HW_ALL
        0x80            // Number of processors.
      }
    })
    Name(SPSD,Package() // SW_ALL
    {
      Package() {5,     // NumEntries. Current Value is 5.
        0,              // Revision. Current Value is 0.
        0,              // Domain.
        0xFC,           // Coordination type 0xFC = SW_ALL
        0x80            // Number of processors.
      }
    })
    //
    // The _PSD object provides information to the OSPM related
    // to P-State coordination between processors in a multi-processor
    // configurations.
    //
    Method(_PSD,0)
    {
      If (And(\_SB.CFGD, PPM_TURBO_BOOST_MAX)) // Intel Turbo Boost Max 3.0
      {
        // Each logical processor gets its own single-member domain so the
        // OS can place work on the preferred (fastest) core.
        Store (0, Index(DerefOf(Index(HPSD, 0)),2)) // Domain
        Store (1, Index(DerefOf(Index(HPSD, 0)),4)) // Number of processors belonging to the domain.
      } Else {
        // One shared domain covering every enumerated processor.
        Store (TCNT, Index(DerefOf(Index(HPSD, 0)),4))
        Store (TCNT, Index(DerefOf(Index(SPSD, 0)),4))
      }
      If(And(PC00,0x0800)) // If Hardware co-ordination of P states
      {
        Return(HPSD)
      }
      Return(SPSD)
    }
  } // End of Scope(\_SB.PR00)
} // End of Definition Block
| {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1
// JobExpansion is a generated placeholder interface onto which hand-written
// expansion methods for the Job client can be declared.
type JobExpansion interface{}
| {
"pile_set_name": "Github"
} |
using System;
using ModuleManager.Progress;
namespace ModuleManager.Patches.PassSpecifiers
{
/// <summary>
/// Pass specifier used for patches that do not declare an explicit pass:
/// they run in the legacy (default) pass and impose no :NEEDS constraints.
/// </summary>
public class LegacyPassSpecifier : IPassSpecifier
{
    /// <summary>
    /// Validates the supplied collaborators; a legacy-pass patch always
    /// satisfies its (non-existent) needs.
    /// </summary>
    public bool CheckNeeds(INeedsChecker needsChecker, IPatchProgress progress)
    {
        if (needsChecker == null)
        {
            throw new ArgumentNullException(nameof(needsChecker));
        }
        if (progress == null)
        {
            throw new ArgumentNullException(nameof(progress));
        }

        return true;
    }

    /// <summary>Human-readable name of this pass.</summary>
    public string Descriptor
    {
        get { return ":LEGACY (default)"; }
    }
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<!-- lang fixed: the page content is Galician, not English -->
<html lang="gl" data-navbar="/account/navbar-profile.html">
  <head>
    <meta charset="utf-8">
    <title translate="yes">Establecer o perfil predeterminado</title>
    <link href="/public/pure-min.css" rel="stylesheet">
    <link href="/public/content.css" rel="stylesheet">
    <link href="/public/content-additional.css" rel="stylesheet">
    <base target="_top" href="/">
  </head>
  <body>
    <h1 translate="yes">Establecer o perfil predeterminado</h1>
    <p translate="yes">O teu perfil predeterminado serve como principal punto de contacto da túa conta.</p>
    <div id="message-container"></div>
    <form id="submit-form" method="post" class="pure-form" action="/account/set-default-profile" name="submit-form">
      <fieldset>
        <div class="pure-control-group">
          <select id="profileid" name="profileid">
            <option value="" translate="yes">
              Selecciona perfil
            </option>
          </select>
        </div>
        <button id="submit-button" type="submit" class="pure-button pure-button-primary" translate="yes">Establecer o perfil predeterminado</button>
      </fieldset>
    </form>
    <template id="success">
      <div class="success message" translate="yes">
        Éxito! O perfil é o teu estándar
      </div>
    </template>
    <template id="unknown-error">
      <div class="error message" translate="yes">
        Erro! Produciuse un erro descoñecido
      </div>
    </template>
    <template id="default-profile">
      <div class="error message" translate="yes">
        Erro! Este é xa o teu perfil predeterminado
      </div>
    </template>
    <template id="profile-option">
      <option value="${profile.profileid}">
        ${profile.contactEmail}, ${profile.firstName} ${profile.lastName}
      </option>
    </template>
  </body>
</html>
| {
"pile_set_name": "Github"
} |
// GitBook plugin hook: re-render mermaid diagrams on every page navigation.
require([
    'gitbook'
], function (gitbook) {
    gitbook.events.bind('page.change', function () {
        // `mermaid` is not loaded via require(); it is assumed to be a global
        // injected by the plugin's other assets — TODO confirm load order.
        mermaid.init();
    });
});
"pile_set_name": "Github"
} |
TODO: Implement depth-major-sources packing paths for NEON
Platforms: ARM NEON
Coding time: M
Experimentation time: M
Skill required: M
Prerequisite reading:
doc/kernels.txt
doc/packing.txt
Model to follow/adapt:
internal/pack_neon.h
At the moment we have NEON optimized packing paths for WidthMajor sources.
We also need paths for DepthMajor sources.
This is harder because for DepthMajor sources, the size of each slice that
we have to load is the kernel's width, which is typically 12 (for the LHS)
or 4 (for the RHS). That's not very friendly to NEON vector-load instructions
which would allow us to load 8 or 16 entries, but not 4 or 12.
So you will have to load 4 entries at a time only. For that, the
vld1q_lane_u32 seems to be as good as you'll get. The other possible
approach would be to load (with plain scalar C++) four uint32's into a
temporary local buffer, and use vld1q_u8 on that. Some experimentation
will be useful here. For that, you can generate assembly with -save-temps
and make assembly easier to inspect by inserting inline assembly comments
such as
asm volatile("#hello");
| {
"pile_set_name": "Github"
} |
package de.peeeq.wurstscript.utils;
import de.peeeq.wurstscript.WLogger;
/**
 * Try-with-resources helper that logs how long a code section took to execute.
 *
 * <p>Usage:
 * <pre>{@code
 * try (ExecutiontimeMeasure m = new ExecutiontimeMeasure("type checking")) {
 *     // timed work
 * }
 * }</pre>
 *
 * <p>Uses {@link System#nanoTime()} rather than {@code currentTimeMillis()} so the
 * measurement is monotonic and immune to wall-clock adjustments (NTP, DST).
 */
public class ExecutiontimeMeasure implements AutoCloseable {

    private final String message;
    /** Start timestamp from the monotonic clock, in nanoseconds. */
    private final long startTime;

    /**
     * Starts the measurement.
     *
     * @param message description of the measured work, included in the log line
     */
    public ExecutiontimeMeasure(String message) {
        this.message = message;
        this.startTime = System.nanoTime();
    }

    /** Stops the measurement and logs the elapsed time in milliseconds. */
    @Override
    public void close() {
        long time = (System.nanoTime() - startTime) / 1_000_000;
        WLogger.info("Executed " + message + " in " + time + "ms.");
    }
}
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by AsyncGenerator.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System.Collections.Generic;
using NUnit.Framework;
using NHibernate.Criterion;
namespace NHibernate.Test.NHSpecificTest.NH2546
{
using System.Threading.Tasks;
[TestFixture]
public class SetCommandParameterSizesFalseFixtureAsync : BugTestCase
{
    // Auto-generated async twin (see AsyncGenerator header) of the NH2546
    // regression fixture; edits here are lost on regeneration.

    // The scenario exercises SQL Server specific parameter sizing, so limit it
    // to the MsSql2008 dialect.
    protected override bool AppliesTo(Dialect.Dialect dialect)
    {
        return dialect is Dialect.MsSql2008Dialect;
    }

    // Seed two students so the LIKE queries below have one match and one miss.
    protected override void OnSetUp()
    {
        using (ISession session = Sfi.OpenSession())
        {
            session.Persist(new Student() { StringTypeWithLengthDefined = "Julian Maughan" });
            session.Persist(new Student() { StringTypeWithLengthDefined = "Bill Clinton" });
            session.Flush();
        }
    }

    protected override void OnTearDown()
    {
        using (ISession session = Sfi.OpenSession())
        {
            session.CreateQuery("delete from Student").ExecuteUpdate();
            session.Flush();
        }
        base.OnTearDown();
    }

    [Test]
    public async Task LikeExpressionWithinDefinedTypeSizeAsync()
    {
        // Pattern is shorter than the mapped column length, so the simple
        // Like restriction path applies.
        using (ISession session = Sfi.OpenSession())
        {
            ICriteria criteria = session
                .CreateCriteria<Student>()
                .Add(Restrictions.Like("StringTypeWithLengthDefined", "Julian%"));
            IList<Student> list = await (criteria.ListAsync<Student>());
            Assert.That(list.Count, Is.EqualTo(1));
        }
    }

    [Test]
    public async Task LikeExpressionExceedsDefinedTypeSizeAsync()
    {
        // In this case we are forcing the usage of LikeExpression class where the length of the associated property is ignored
        using (ISession session = Sfi.OpenSession())
        {
            ICriteria criteria = session
                .CreateCriteria<Student>()
                .Add(Restrictions.Like("StringTypeWithLengthDefined", "[a-z][a-z][a-z]ian%", MatchMode.Exact, null));
            IList<Student> list = await (criteria.ListAsync<Student>());
            Assert.That(list.Count, Is.EqualTo(1));
        }
    }
}
}
| {
"pile_set_name": "Github"
} |
AxisControlBus
ControlBus
PathPlanning1
PathPlanning6
PathToAxisControlBus
GearType1
GearType2
Motor
Controller
AxisType1
AxisType2
MechanicalStructure
| {
"pile_set_name": "Github"
} |
文件说明:
1、base_dic_full.dic
hash索引 -- 字典带有词频和词性标志。
2、words_addons.dic
s 开头的表示停止词 u 后缀词(地名后缀、数学单位等) n 前导词(姓、汉字数词等) a 后导词(地区,部门等)
3、 not-build/base_dic_full.txt
没编译过的词典源码
4、重新编译词典的方法:
<?php
header('Content-Type: text/html; charset=utf-8');
require_once('phpanalysis.class.php');
$pa = new PhpAnalysis('utf-8', 'utf-8', false);
$pa->MakeDict( sourcefile, 16 , 'dict/base_dic_full.dic');
echo "OK";
?> | {
"pile_set_name": "Github"
} |
DataverseUse test
Set import-private-functions=true
Query:
Let Variable [ Name=$txt ]
:=
LiteralExpr [STRING] [Hello World, I would like to inform you of the importance of Foo Bar. Yes, Foo Bar. Jürgen.]
Let Variable [ Name=$tokens ]
:=
FunctionCall asterix.hashed-word-tokens@1[
Variable [ Name=$txt ]
]
SELECT ELEMENT [
Variable [ Name=$token ]
]
FROM [ Variable [ Name=$tokens ]
AS Variable [ Name=$token ]
]
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" ?>
<component id="root" name="root">
<component id="system" name="system">
<!--McPAT will skip the components if number is set to 0 -->
<param name="number_of_cores" value="64"/>
<param name="number_of_L1Directories" value="0"/>
<param name="number_of_L2Directories" value="0"/>
<param name="number_of_L2s" value="64"/> <!-- This number means how many L2 clusters in each cluster there can be multiple banks/ports -->
<param name="number_of_L3s" value="0"/> <!-- This number means how many L3 clusters -->
<param name="number_of_NoCs" value="1"/>
<param name="homogeneous_cores" value="1"/><!--1 means homo -->
<param name="homogeneous_L2s" value="1"/>
<param name="homogeneous_L1Directorys" value="1"/>
<param name="homogeneous_L2Directorys" value="1"/>
<param name="homogeneous_L3s" value="1"/>
<param name="homogeneous_ccs" value="1"/><!--cache coherece hardware -->
<param name="homogeneous_NoCs" value="1"/>
<param name="core_tech_node" value="22"/><!-- nm -->
<param name="target_core_clockrate" value="3500"/><!--MHz -->
<param name="temperature" value="360"/> <!-- Kelvin -->
<param name="number_cache_levels" value="2"/>
<param name="interconnect_projection_type" value="0"/><!--0: agressive wire technology; 1: conservative wire technology -->
<param name="device_type" value="0"/><!--0: HP(High Performance Type); 1: LSTP(Low standby power) 2: LOP (Low Operating Power) -->
<param name="longer_channel_device" value="1"/><!-- 0 no use; 1 use when possible -->
<param name="machine_bits" value="64"/>
<param name="virtual_address_width" value="64"/>
<param name="physical_address_width" value="52"/>
<param name="virtual_memory_page_size" value="4096"/>
<stat name="total_cycles" value="100000"/>
<stat name="idle_cycles" value="0"/>
<stat name="busy_cycles" value="100000"/>
<!--This page size(B) is complete different from the page size in Main memo secction. this page size is the size of
virtual memory from OS/Archi perspective; the page size in Main memo secction is the actuall physical line in a DRAM bank -->
<!-- *********************** cores ******************* -->
<component id="system.core0" name="core0">
<!-- Core property -->
<param name="clock_rate" value="3500"/>
<param name="instruction_length" value="32"/>
<param name="opcode_width" value="9"/>
<!-- address width determins the tag_width in Cache, LSQ and buffers in cache controller
default value is machine_bits, if not set -->
<param name="machine_type" value="1"/><!-- 1 inorder; 0 OOO-->
<!-- inorder/OoO -->
<param name="number_hardware_threads" value="4"/>
<!-- number_instruction_fetch_ports(icache ports) is always 1 in single-thread processor,
it only may be more than one in SMT processors. BTB ports always equals to fetch ports since
branch information in consective branch instructions in the same fetch group can be read out from BTB once.-->
<param name="fetch_width" value="1"/>
<!-- fetch_width determins the size of cachelines of L1 cache block -->
<param name="number_instruction_fetch_ports" value="1"/>
<param name="decode_width" value="1"/>
<!-- decode_width determins the number of ports of the
renaming table (both RAM and CAM) scheme -->
<param name="issue_width" value="1"/>
<!-- issue_width determins the number of ports of Issue window and other logic
as in the complexity effective proccessors paper; issue_width==dispatch_width -->
<param name="commit_width" value="1"/>
<!-- commit_width determins the number of ports of register files -->
<param name="fp_issue_width" value="1"/>
<param name="prediction_width" value="0"/>
<!-- number of branch instructions can be predicted simultannouesl-->
<!-- Current version of McPAT does not distinguish int and floating point pipelines
Theses parameters are reserved for future use.-->
<param name="pipelines_per_core" value="1,1"/>
<!--integer_pipeline and floating_pipelines, if the floating_pipelines is 0, then the pipeline is shared-->
<param name="pipeline_depth" value="6,6"/>
<!-- pipeline depth of int and fp, if pipeline is shared, the second number is the average cycles of fp ops -->
<!-- issue and exe unit-->
<param name="ALU_per_core" value="1"/>
<!-- contains an adder, a shifter, and a logical unit -->
<param name="MUL_per_core" value="1"/>
<!-- For MUL and Div -->
<param name="FPU_per_core" value="0.125"/>
<!-- buffer between IF and ID stage -->
<param name="instruction_buffer_size" value="16"/>
<!-- buffer between ID and sche/exe stage -->
<param name="decoded_stream_buffer_size" value="16"/>
<param name="instruction_window_scheme" value="0"/><!-- 0 PHYREG based, 1 RSBASED-->
<!-- McPAT support 2 types of OoO cores, RS based and physical reg based-->
<param name="instruction_window_size" value="16"/>
<param name="fp_instruction_window_size" value="16"/>
<!-- the instruction issue Q as in Alpha 21264; The RS as in Intel P6 -->
<param name="ROB_size" value="80"/>
<!-- each in-flight instruction has an entry in ROB -->
<!-- registers -->
<param name="archi_Regs_IRF_size" value="32"/>
<param name="archi_Regs_FRF_size" value="32"/>
<!-- if OoO processor, phy_reg number is needed for renaming logic,
renaming logic is for both integer and floating point insts. -->
<param name="phy_Regs_IRF_size" value="80"/>
<param name="phy_Regs_FRF_size" value="80"/>
<!-- rename logic -->
<param name="rename_scheme" value="0"/>
<!-- can be RAM based(0) or CAM based(1) rename scheme
RAM-based scheme will have free list, status table;
CAM-based scheme have the valid bit in the data field of the CAM
both RAM and CAM need RAM-based checkpoint table, checkpoint_depth=# of in_flight instructions;
Detailed RAT Implementation see TR -->
<param name="register_windows_size" value="8"/>
<!-- how many windows in the windowed register file, sun processors;
no register windowing is used when this number is 0 -->
<!-- In OoO cores, loads and stores can be issued whether inorder(Pentium Pro) or (OoO)out-of-order(Alpha),
They will always try to exeute out-of-order though. -->
<param name="LSU_order" value="inorder"/>
<param name="store_buffer_size" value="32"/>
<!-- By default, in-order cores do not have load buffers -->
<param name="load_buffer_size" value="32"/>
<!-- number of ports refer to sustainable concurrent memory accesses -->
<param name="memory_ports" value="1"/>
<!-- max_allowed_in_flight_memo_instructions determins the # of ports of load and store buffer
as well as the ports of Dcache which is connected to LSU -->
<!-- dual-pumped Dcache can be used to save the extra read/write ports -->
<param name="RAS_size" value="32"/>
<!-- general stats, defines simulation periods;require total, idle, and busy cycles for senity check -->
<!-- please note: if target architecture is X86, then all the instrucions refer to (fused) micro-ops -->
<stat name="total_instructions" value="800000"/>
<stat name="int_instructions" value="600000"/>
<stat name="fp_instructions" value="20000"/>
<stat name="branch_instructions" value="0"/>
<stat name="branch_mispredictions" value="0"/>
<stat name="load_instructions" value="100000"/>
<stat name="store_instructions" value="100000"/>
<stat name="committed_instructions" value="800000"/>
<stat name="committed_int_instructions" value="600000"/>
<stat name="committed_fp_instructions" value="20000"/>
<stat name="pipeline_duty_cycle" value="0.6"/><!--<=1, runtime_ipc/peak_ipc; averaged for all cores if homogenous -->
<!-- the following cycle stats are used for heterogeneouse cores only,
please ignore them if homogeneouse cores -->
<stat name="total_cycles" value="100000"/>
<stat name="idle_cycles" value="0"/>
<stat name="busy_cycles" value="100000"/>
<!-- instruction buffer stats -->
<!-- ROB stats, both RS and Phy based OoOs have ROB
performance simulator should capture the difference on accesses,
otherwise, McPAT has to guess based on number of commited instructions. -->
<stat name="ROB_reads" value="263886"/>
<stat name="ROB_writes" value="263886"/>
<!-- RAT accesses -->
<stat name="rename_accesses" value="263886"/>
<stat name="fp_rename_accesses" value="263886"/>
<!-- decode and rename stage use this, should be total ic - nop -->
<!-- Inst window stats -->
<stat name="inst_window_reads" value="263886"/>
<stat name="inst_window_writes" value="263886"/>
<stat name="inst_window_wakeup_accesses" value="263886"/>
<stat name="fp_inst_window_reads" value="263886"/>
<stat name="fp_inst_window_writes" value="263886"/>
<stat name="fp_inst_window_wakeup_accesses" value="263886"/>
<!-- RF accesses -->
<stat name="int_regfile_reads" value="1600000"/>
<stat name="float_regfile_reads" value="40000"/>
<stat name="int_regfile_writes" value="800000"/>
<stat name="float_regfile_writes" value="20000"/>
<!-- accesses to the working reg -->
<stat name="function_calls" value="5"/>
<stat name="context_switches" value="260343"/>
<!-- Number of Windowes switches (number of function calls and returns)-->
<!-- Alu stats by default, the processor has one FPU that includes the divider and
multiplier. The fpu accesses should include accesses to multiplier and divider -->
<stat name="ialu_accesses" value="800000"/>
<stat name="fpu_accesses" value="10000"/>
<stat name="mul_accesses" value="100000"/>
<stat name="cdb_alu_accesses" value="1000000"/>
<stat name="cdb_mul_accesses" value="0"/>
<stat name="cdb_fpu_accesses" value="0"/>
<!-- multiple cycle accesses should be counted multiple times,
otherwise, McPAT can use internal counter for different floating point instructions
to get final accesses. But that needs detailed info for floating point inst mix -->
<!-- currently the performance simulator should
make sure all the numbers are final numbers,
including the explicit read/write accesses,
and the implicite accesses such as replacements and etc.
Future versions of McPAT may be able to reason the implicite access
based on param and stats of last level cache
The same rule applies to all cache access stats too! -->
<!-- following is AF for max power computation.
Do not change them, unless you understand them-->
<stat name="IFU_duty_cycle" value="0.25"/>
<stat name="LSU_duty_cycle" value="0.25"/>
<stat name="MemManU_I_duty_cycle" value="1"/>
<stat name="MemManU_D_duty_cycle" value="0.25"/>
<stat name="ALU_duty_cycle" value="0.9"/>
<stat name="MUL_duty_cycle" value="0.5"/>
<stat name="FPU_duty_cycle" value="0.4"/>
<stat name="ALU_cdb_duty_cycle" value="0.9"/>
<stat name="MUL_cdb_duty_cycle" value="0.5"/>
<stat name="FPU_cdb_duty_cycle" value="0.4"/>
<component id="system.core0.predictor" name="PBT">
<!-- branch predictor; tournament predictor see Alpha implementation -->
<param name="local_predictor_size" value="10,3"/>
<param name="local_predictor_entries" value="1024"/>
<param name="global_predictor_entries" value="4096"/>
<param name="global_predictor_bits" value="2"/>
<param name="chooser_predictor_entries" value="4096"/>
<param name="chooser_predictor_bits" value="2"/>
<!-- These parameters can be combined like below in next version
<param name="load_predictor" value="10,3,1024"/>
<param name="global_predictor" value="4096,2"/>
<param name="predictor_chooser" value="4096,2"/>
-->
</component>
<component id="system.core0.itlb" name="itlb">
<param name="number_entries" value="64"/>
<stat name="total_accesses" value="800000"/>
<stat name="total_misses" value="4"/>
<stat name="conflicts" value="0"/>
<!-- there is no write requests to itlb although writes happen to itlb after miss,
which is actually a replacement -->
</component>
<component id="system.core0.icache" name="icache">
<!-- there is no write requests to itlb although writes happen to it after miss,
which is actually a replacement -->
<param name="icache_config" value="16384,32,4,1,1,3,8,0"/>
<!-- the parameters are capacity,block_width, associativity, bank, throughput w.r.t. core clock, latency w.r.t. core clock,output_width, cache policy -->
<!-- cache_policy;//0 no write or write-though with non-write allocate;1 write-back with write-allocate -->
<param name="buffer_sizes" value="16, 16, 16,0"/>
<!-- cache controller buffer sizes: miss_buffer_size(MSHR),fill_buffer_size,prefetch_buffer_size,wb_buffer_size-->
<stat name="read_accesses" value="200000"/>
<stat name="read_misses" value="0"/>
<stat name="conflicts" value="0"/>
</component>
<component id="system.core0.dtlb" name="dtlb">
<param name="number_entries" value="64"/>
<stat name="total_accesses" value="200000"/>
<stat name="total_misses" value="4"/>
<stat name="conflicts" value="0"/>
</component>
<component id="system.core0.dcache" name="dcache">
<!-- all the buffer related are optional -->
<param name="dcache_config" value="8192,16,4,1,1,3,16,0"/>
<param name="buffer_sizes" value="16, 16, 16, 16"/>
<!-- cache controller buffer sizes: miss_buffer_size(MSHR),fill_buffer_size,prefetch_buffer_size,wb_buffer_size-->
<stat name="read_accesses" value="200000"/>
<stat name="write_accesses" value="27276"/>
<stat name="read_misses" value="1632"/>
<stat name="write_misses" value="183"/>
<stat name="conflicts" value="0"/>
</component>
<component id="system.core0.BTB" name="BTB">
<!-- all the buffer related are optional -->
<param name="BTB_config" value="8192,4,2,1, 1,3"/>
<!-- the parameters are capacity,block_width,associativity,bank, throughput w.r.t. core clock, latency w.r.t. core clock,-->
</component>
</component>
<component id="system.L1Directory0" name="L1Directory0">
<param name="Directory_type" value="0"/>
<!--0 cam based shadowed tag. 1 directory cache -->
<param name="Dir_config" value="2048,1,0,1, 4, 4,8"/>
<!-- the parameters are capacity,block_width, associativity,bank, throughput w.r.t. core clock, latency w.r.t. core clock,-->
<param name="buffer_sizes" value="8, 8, 8, 8"/>
<!-- all the buffer related are optional -->
<param name="clockrate" value="3500"/>
<param name="ports" value="1,1,1"/>
<!-- number of r, w, and rw search ports -->
<param name="device_type" value="0"/>
<!-- although there are multiple access types,
the performance simulator needs to cast them into reads or writes,
e.g. the invalidates can be considered as writes -->
<stat name="read_accesses" value="800000"/>
<stat name="write_accesses" value="27276"/>
<stat name="read_misses" value="1632"/>
<stat name="write_misses" value="183"/>
<stat name="conflicts" value="20"/>
<stat name="duty_cycle" value="0.45"/>
</component>
<component id="system.L2Directory0" name="L2Directory0">
<param name="Directory_type" value="1"/>
<!--0 cam based shadowed tag. 1 directory cache -->
<param name="Dir_config" value="1048576,16,16,1,2, 100"/>
<!-- the parameters are capacity,block_width, associativity,bank, throughput w.r.t. core clock, latency w.r.t. core clock,-->
<param name="buffer_sizes" value="8, 8, 8, 8"/>
<!-- all the buffer related are optional -->
<param name="clockrate" value="3500"/>
<param name="ports" value="1,1,1"/>
<!-- number of r, w, and rw search ports -->
<param name="device_type" value="0"/>
<!-- although there are multiple access types,
the performance simulator needs to cast them into reads or writes,
e.g. the invalidates can be considered as writes -->
<stat name="read_accesses" value="58824"/>
<stat name="write_accesses" value="27276"/>
<stat name="read_misses" value="1632"/>
<stat name="write_misses" value="183"/>
<stat name="conflicts" value="100"/>
<stat name="duty_cycle" value="0.45"/>
</component>
<component id="system.L20" name="L20">
<!-- all the buffer related are optional -->
<param name="L2_config" value="1048576,64,16,1, 4,23, 64, 1"/>
<!-- consider 4-way bank interleaving for Niagara 1 -->
<!-- the parameters are capacity,block_width, associativity, bank, throughput w.r.t. core clock, latency w.r.t. core clock,output_width, cache policy -->
<param name="buffer_sizes" value="16, 16, 16, 16"/>
<!-- cache controller buffer sizes: miss_buffer_size(MSHR),fill_buffer_size,prefetch_buffer_size,wb_buffer_size-->
<param name="clockrate" value="3500"/>
<param name="ports" value="1,1,1"/>
<!-- number of r, w, and rw ports -->
<param name="device_type" value="0"/>
<stat name="read_accesses" value="200000"/>
<stat name="write_accesses" value="0"/>
<stat name="read_misses" value="0"/>
<stat name="write_misses" value="0"/>
<stat name="conflicts" value="0"/>
<stat name="duty_cycle" value="0.5"/>
</component>
<!--**********************************************************************-->
<component id="system.L30" name="L30">
<param name="L3_config" value="1048576,64,16,1, 2,100, 64,1"/>
<!-- the parameters are capacity,block_width, associativity, bank, throughput w.r.t. core clock, latency w.r.t. core clock,output_width, cache policy -->
<param name="clockrate" value="3500"/>
<param name="ports" value="1,1,1"/>
<!-- number of r, w, and rw ports -->
<param name="device_type" value="0"/>
<param name="buffer_sizes" value="16, 16, 16, 16"/>
<!-- cache controller buffer sizes: miss_buffer_size(MSHR),fill_buffer_size,prefetch_buffer_size,wb_buffer_size-->
<stat name="read_accesses" value="58824"/>
<stat name="write_accesses" value="27276"/>
<stat name="read_misses" value="1632"/>
<stat name="write_misses" value="183"/>
<stat name="conflicts" value="0"/>
<stat name="duty_cycle" value="0.35"/>
</component>
<!--**********************************************************************-->
<component id="system.NoC0" name="noc0">
<param name="clockrate" value="3500"/>
<param name="type" value="1"/>
<!-- 1 NoC, 0 bus -->
<param name="horizontal_nodes" value="8"/>
<param name="vertical_nodes" value="8"/>
<param name="has_global_link" value="1"/>
<!-- 1 has global link, 0 does not have global link -->
<param name="link_throughput" value="1"/><!--w.r.t clock -->
<param name="link_latency" value="1"/><!--w.r.t clock -->
<!-- throughput >= latency -->
<!-- Router architecture -->
<param name="input_ports" value="5"/>
<param name="output_ports" value="5"/>
<param name="virtual_channel_per_port" value="1"/>
<!-- input buffer; in classic routers only input ports need buffers -->
<param name="flit_bits" value="256"/>
<param name="input_buffer_entries_per_vc" value="4"/><!--VCs within the same ports share input buffers whose size is proportional to the number of VCs-->
<param name="chip_coverage" value="1"/>
<!-- When multiple NOC present, one NOC will cover part of the whole chip. chip_coverage <=1 -->
<stat name="total_accesses" value="360000"/>
<!-- This is the number of total accesses within the whole network not for each router -->
<stat name="duty_cycle" value="0.1"/>
</component>
<!--**********************************************************************-->
<component id="system.mem" name="mem">
<!-- Main memory property -->
<param name="mem_tech_node" value="32"/>
<param name="device_clock" value="200"/><!--MHz, this is clock rate of the actual memory device, not the FSB -->
<param name="peak_transfer_rate" value="3200"/><!--MB/S-->
<param name="internal_prefetch_of_DRAM_chip" value="4"/>
<!-- 2 for DDR, 4 for DDR2, 8 for DDR3...-->
<!-- the device clock, peak_transfer_rate, and the internal prefetch decide the DIMM property -->
<!-- above numbers can be easily found from Wikipedia -->
<param name="capacity_per_channel" value="4096"/> <!-- MB -->
<!-- capacity_per_Dram_chip=capacity_per_channel/number_of_dimms/number_ranks/Dram_chips_per_rank
Current McPAT assumes single DIMMs are used.-->
<param name="number_ranks" value="2"/>
<param name="num_banks_of_DRAM_chip" value="8"/>
<param name="Block_width_of_DRAM_chip" value="64"/> <!-- B -->
<param name="output_width_of_DRAM_chip" value="8"/>
<!--number of Dram_chips_per_rank=" 72/output_width_of_DRAM_chip-->
<!--number of Dram_chips_per_rank=" 72/output_width_of_DRAM_chip-->
<param name="page_size_of_DRAM_chip" value="8"/> <!-- 8 or 16 -->
<param name="burstlength_of_DRAM_chip" value="8"/>
<stat name="memory_accesses" value="1052"/>
<stat name="memory_reads" value="1052"/>
<stat name="memory_writes" value="1052"/>
</component>
<component id="system.mc" name="mc">
<!-- Memory controllers are for DDR(2,3...) DIMMs -->
<!-- the current version of McPAT uses published values for base parameters of the memory controller;
improvements on the MC will be added in later versions. -->
<param name="mc_clock" value="200"/><!--DIMM IO bus clock rate MHz DDR2-400 for Niagara 1-->
<param name="peak_transfer_rate" value="3200"/><!--MB/S-->
<param name="llc_line_length" value="64"/><!--B-->
<param name="number_mcs" value="4"/>
<!-- current McPAT only supports homogeneous memory controllers -->
<param name="memory_channels_per_mc" value="1"/>
<param name="number_ranks" value="2"/>
<!-- # of ranks of each channel-->
<param name="req_window_size_per_channel" value="32"/>
<param name="IO_buffer_size_per_channel" value="32"/>
<param name="databus_width" value="128"/>
<param name="addressbus_width" value="51"/>
<!-- McPAT will add the control bus width to the addressbus width automatically -->
<stat name="memory_accesses" value="33333"/>
<stat name="memory_reads" value="16667"/>
<stat name="memory_writes" value="16667"/>
<!-- McPAT does not track individual MCs; instead, it takes the total accesses and calculates
the average power per MC or per channel. This is sufficient for most applications.
Further trackdown can be easily added in later versions. -->
</component>
<!--**********************************************************************-->
</component>
</component>
| {
"pile_set_name": "Github"
} |
log.level=${log.level}
log.path=${log.path}
dubbo.registry.address=${dubbo.registry.address}
dubbo.protocal.port=${dubbo.protocal.port}
dubbo.service.version=${dubbo.service.version}
ws.connect.path=${ws.connect.path}
ws.connect.port=${ws.connect.port}
ws.connect.bus.port=${ws.connect.bus.port}
service.name=ws_server
service.version=1.0
service.bus.name=bus_ws_server
service.bus.version=1.0
consul.host=${consul.host}
consul.port=${consul.port} | {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2017, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "jfr/recorder/checkpoint/types/jfrTypeSetUtils.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
// Singleton entry for the bootstrap class-loader name. It is shared across
// clear() cycles and always holds the reserved symbol id 1.
static JfrSymbolId::CStringEntry* bootstrap = NULL;

// Constructs both lookup tables. The id counter starts at 1 because id 1 is
// reserved for the bootstrap entry; regular entries are assigned ids from 2
// onward by the on_link() callbacks.
JfrSymbolId::JfrSymbolId() :
  _sym_table(new SymbolTable(this)),
  _cstring_table(new CStringTable(this)),
  _sym_list(NULL),
  _cstring_list(NULL),
  _sym_query(NULL),
  _cstring_query(NULL),
  _symbol_id_counter(1),
  _class_unload(false) {
  assert(_sym_table != NULL, "invariant");
  assert(_cstring_table != NULL, "invariant");
  bootstrap = new CStringEntry(0, (const char*)&BOOTSTRAP_LOADER_NAME);
  assert(bootstrap != NULL, "invariant");
  bootstrap->set_id(1);
  // The bootstrap entry is kept as the permanent head of the cstring list.
  _cstring_list = bootstrap;
}

JfrSymbolId::~JfrSymbolId() {
  // clear() unlinks all table entries (releasing refcounts / heap strings)
  // before the tables themselves are deleted.
  clear();
  delete _sym_table;
  delete _cstring_table;
  delete bootstrap;
}

// Drops all entries from both tables, resets the id counter to its initial
// value and re-installs the bootstrap entry as the cstring list head.
void JfrSymbolId::clear() {
  assert(_sym_table != NULL, "invariant");
  if (_sym_table->has_entries()) {
    _sym_table->clear_entries();
  }
  assert(!_sym_table->has_entries(), "invariant");
  assert(_cstring_table != NULL, "invariant");
  if (_cstring_table->has_entries()) {
    _cstring_table->clear_entries();
  }
  assert(!_cstring_table->has_entries(), "invariant");
  _sym_list = NULL;
  _symbol_id_counter = 1;
  _sym_query = NULL;
  _cstring_query = NULL;
  assert(bootstrap != NULL, "invariant");
  bootstrap->reset();
  _cstring_list = bootstrap;
}

// Records whether subsequently marked entries belong to a class-unload pass;
// consulted by the mark() overloads when tagging entries as unloading.
void JfrSymbolId::set_class_unload(bool class_unload) {
  _class_unload = class_unload;
}
// Table callback: invoked when a new Symbol entry is inserted.
// Takes a refcount on the Symbol (released in on_unlink), assigns the next
// symbol id, and pushes the entry onto the intrusive _sym_list.
void JfrSymbolId::on_link(const SymbolEntry* entry) {
  assert(entry != NULL, "invariant");
  const_cast<Symbol*>(entry->literal())->increment_refcount();
  assert(entry->id() == 0, "invariant");
  entry->set_id(++_symbol_id_counter);
  entry->set_list_next(_sym_list);
  _sym_list = entry;
}

// Table callback: equality test for a Symbol lookup. The query operand is
// stashed in _sym_query by mark() just before lookup_put(); Symbols are
// canonical so pointer identity suffices.
bool JfrSymbolId::on_equals(uintptr_t hash, const SymbolEntry* entry) {
  assert(entry != NULL, "invariant");
  assert(entry->hash() == hash, "invariant");
  assert(_sym_query != NULL, "invariant");
  return _sym_query == entry->literal();
}

// Table callback: invoked when a Symbol entry is removed; releases the
// refcount taken in on_link().
void JfrSymbolId::on_unlink(const SymbolEntry* entry) {
  assert(entry != NULL, "invariant");
  const_cast<Symbol*>(entry->literal())->decrement_refcount();
}
// Duplicates a ResourceArea-allocated string into JfrCHeap storage so the
// entry can outlive the current ResourceMark. The copy includes the
// terminating NUL. Caller owns the returned array (freed in on_unlink()).
static const char* resource_to_cstring(const char* resource_str) {
  assert(resource_str != NULL, "invariant");
  const size_t size = strlen(resource_str) + 1;
  char* const dup = JfrCHeapObj::new_array<char>(size);
  assert(dup != NULL, "invariant");
  memcpy(dup, resource_str, size);
  return dup;
}
// Table callback: invoked when a new c-string entry is inserted.
// Assigns the next symbol id, replaces the (ResourceArea) literal with a
// durable JfrCHeap copy, and pushes the entry onto _cstring_list.
void JfrSymbolId::on_link(const CStringEntry* entry) {
  assert(entry != NULL, "invariant");
  assert(entry->id() == 0, "invariant");
  entry->set_id(++_symbol_id_counter);
  // Re-point the literal at heap storage; freed again in on_unlink().
  const_cast<CStringEntry*>(entry)->set_literal(resource_to_cstring(entry->literal()));
  entry->set_list_next(_cstring_list);
  _cstring_list = entry;
}
// Returns true iff query and candidate are exactly equal.
// The previous implementation used strncmp bounded by strlen(query), which
// also accepted any candidate that merely has query as a prefix (e.g.
// query "abc" matched candidate "abcdef"). Since this predicate decides
// entry identity on a hash-bucket collision, a prefix match could resolve
// two distinct strings to the same symbol id. Full comparison fixes that.
static bool string_compare(const char* query, const char* candidate) {
  assert(query != NULL, "invariant");
  assert(candidate != NULL, "invariant");
  return strcmp(query, candidate) == 0;
}
// Table callback: equality test for a c-string lookup. The query operand is
// stashed in _cstring_query by mark() just before lookup_put().
bool JfrSymbolId::on_equals(uintptr_t hash, const CStringEntry* entry) {
  assert(entry != NULL, "invariant");
  assert(entry->hash() == hash, "invariant");
  assert(_cstring_query != NULL, "invariant");
  return string_compare(_cstring_query, entry->literal());
}
// Table callback: invoked when a c-string entry is removed; frees the heap
// copy allocated in on_link() via resource_to_cstring().
void JfrSymbolId::on_unlink(const CStringEntry* entry) {
  assert(entry != NULL, "invariant");
  // The allocation size is strlen(literal) + 1 (string plus NUL). The
  // previous code computed strlen(entry->literal() + 1) - the length of the
  // string starting at its second character - under-reporting the freed
  // size by two bytes.
  JfrCHeapObj::free(const_cast<char*>(entry->literal()), strlen(entry->literal()) + 1);
}
// Returns the reserved id (1) for the bootstrap class-loader name,
// optionally tagging the bootstrap entry for the leak profiler.
traceid JfrSymbolId::bootstrap_name(bool leakp) {
  assert(bootstrap != NULL, "invariant");
  if (leakp) {
    bootstrap->set_leakp();
  }
  return 1;
}

// Marks a Symbol using its identity hash as the table key.
traceid JfrSymbolId::mark(const Symbol* symbol, bool leakp) {
  assert(symbol != NULL, "invariant");
  return mark((uintptr_t)symbol->identity_hash(), symbol, leakp);
}

// Inserts (or finds) the Symbol in the table and returns its stable id.
// _sym_query must be set before lookup_put() because the table's equality
// callback (on_equals) reads it.
traceid JfrSymbolId::mark(uintptr_t hash, const Symbol* data, bool leakp) {
  assert(data != NULL, "invariant");
  assert(_sym_table != NULL, "invariant");
  _sym_query = data;
  const SymbolEntry& entry = _sym_table->lookup_put(hash, data);
  if (_class_unload) {
    entry.set_unloading();
  }
  if (leakp) {
    entry.set_leakp();
  }
  return entry.id();
}

// Inserts (or finds) a c-string in the table and returns its stable id.
// Mirrors the Symbol overload; _cstring_query feeds on_equals().
traceid JfrSymbolId::mark(uintptr_t hash, const char* str, bool leakp) {
  assert(str != NULL, "invariant");
  assert(_cstring_table != NULL, "invariant");
  _cstring_query = str;
  const CStringEntry& entry = _cstring_table->lookup_put(hash, str);
  if (_class_unload) {
    entry.set_unloading();
  }
  if (leakp) {
    entry.set_leakp();
  }
  return entry.id();
}
/*
* jsr292 anonymous classes symbol is the external name +
* the identity_hashcode slash appended:
* java.lang.invoke.LambdaForm$BMH/22626602
*
* caller needs ResourceMark
*/
// Hash for an anonymous class name: the identity hash of the class mirror,
// which is also the number appended to the synthesized name below.
uintptr_t JfrSymbolId::unsafe_anonymous_klass_name_hash(const InstanceKlass* ik) {
  assert(ik != NULL, "invariant");
  assert(ik->is_anonymous(), "invariant");
  const oop mirror = ik->java_mirror_no_keepalive();
  assert(mirror != NULL, "invariant");
  return (uintptr_t)mirror->identity_hash();
}

// Builds "<external name>/<hash>" in ResourceArea memory (caller needs a
// ResourceMark). The buffer holds the external name (result_len chars),
// the "/<hash>" suffix (hash_len chars) and the NUL terminator.
static const char* create_unsafe_anonymous_klass_symbol(const InstanceKlass* ik, uintptr_t hash) {
  assert(ik != NULL, "invariant");
  assert(ik->is_anonymous(), "invariant");
  assert(hash != 0, "invariant");
  char* anonymous_symbol = NULL;
  const oop mirror = ik->java_mirror_no_keepalive();
  assert(mirror != NULL, "invariant");
  char hash_buf[40];
  sprintf(hash_buf, "/" UINTX_FORMAT, hash);
  const size_t hash_len = strlen(hash_buf);
  const size_t result_len = ik->name()->utf8_length();
  anonymous_symbol = NEW_RESOURCE_ARRAY(char, result_len + hash_len + 1);
  ik->name()->as_klass_external_name(anonymous_symbol, (int)result_len + 1);
  assert(strlen(anonymous_symbol) == result_len, "invariant");
  // Overwrite the NUL written by as_klass_external_name with the suffix.
  strcpy(anonymous_symbol + result_len, hash_buf);
  assert(strlen(anonymous_symbol) == result_len + hash_len, "invariant");
  return anonymous_symbol;
}

// True for jsr292/Unsafe anonymous instance classes, which have no stable
// Symbol name and need the synthesized name above.
bool JfrSymbolId::is_unsafe_anonymous_klass(const Klass* k) {
  assert(k != NULL, "invariant");
  return k->is_instance_klass() && ((const InstanceKlass*)k)->is_anonymous();
}

// Marks the synthesized name of an anonymous class as a c-string entry.
traceid JfrSymbolId::mark_unsafe_anonymous_klass_name(const InstanceKlass* ik, bool leakp) {
  assert(ik != NULL, "invariant");
  assert(ik->is_anonymous(), "invariant");
  const uintptr_t hash = unsafe_anonymous_klass_name_hash(ik);
  const char* const anonymous_klass_symbol = create_unsafe_anonymous_klass_symbol(ik, hash);
  return mark(hash, anonymous_klass_symbol, leakp);
}

// Marks the name of any Klass: anonymous classes go through the synthesized
// c-string path, all others through their Symbol name.
traceid JfrSymbolId::mark(const Klass* k, bool leakp) {
  assert(k != NULL, "invariant");
  traceid symbol_id = 0;
  if (is_unsafe_anonymous_klass(k)) {
    assert(k->is_instance_klass(), "invariant");
    symbol_id = mark_unsafe_anonymous_klass_name((const InstanceKlass*)k, leakp);
  }
  if (0 == symbol_id) {
    Symbol* const sym = k->name();
    if (sym != NULL) {
      symbol_id = mark(sym, leakp);
    }
  }
  assert(symbol_id > 0, "a symbol handler must mark the symbol for writing");
  return symbol_id;
}
// Aggregates the symbol id tables and the list of klasses registered during
// one type-set serialization pass.
JfrArtifactSet::JfrArtifactSet(bool class_unload) : _symbol_id(new JfrSymbolId()),
                                                    _klass_list(NULL),
                                                    _total_count(0) {
  initialize(class_unload);
  assert(_klass_list != NULL, "invariant");
}

static const size_t initial_class_list_size = 200;

// (Re)initializes the set for a new pass; when 'clear' is set the symbol
// tables from the previous pass are dropped first.
void JfrArtifactSet::initialize(bool class_unload, bool clear /* false */) {
  assert(_symbol_id != NULL, "invariant");
  if (clear) {
    _symbol_id->clear();
  }
  _symbol_id->set_class_unload(class_unload);
  _total_count = 0;
  // resource allocation
  _klass_list = new GrowableArray<const Klass*>(initial_class_list_size, false, mtTracing);
}

JfrArtifactSet::~JfrArtifactSet() {
  _symbol_id->clear();
  delete _symbol_id;
  // _klass_list will be cleared by a ResourceMark
}

// The remaining methods are thin delegations to the owned JfrSymbolId /
// klass list; see those implementations for semantics.
traceid JfrArtifactSet::bootstrap_name(bool leakp) {
  return _symbol_id->bootstrap_name(leakp);
}

traceid JfrArtifactSet::mark_unsafe_anonymous_klass_name(const Klass* klass, bool leakp) {
  assert(klass->is_instance_klass(), "invariant");
  return _symbol_id->mark_unsafe_anonymous_klass_name((const InstanceKlass*)klass, leakp);
}

traceid JfrArtifactSet::mark(uintptr_t hash, const Symbol* sym, bool leakp) {
  return _symbol_id->mark(hash, sym, leakp);
}

traceid JfrArtifactSet::mark(const Klass* klass, bool leakp) {
  return _symbol_id->mark(klass, leakp);
}

traceid JfrArtifactSet::mark(const Symbol* symbol, bool leakp) {
  return _symbol_id->mark(symbol, leakp);
}

traceid JfrArtifactSet::mark(uintptr_t hash, const char* const str, bool leakp) {
  return _symbol_id->mark(hash, str, leakp);
}

bool JfrArtifactSet::has_klass_entries() const {
  return _klass_list->is_nonempty();
}

int JfrArtifactSet::entries() const {
  return _klass_list->length();
}

// Registers a klass exactly once per pass (the find() assert enforces this).
void JfrArtifactSet::register_klass(const Klass* k) {
  assert(k != NULL, "invariant");
  assert(_klass_list != NULL, "invariant");
  assert(_klass_list->find(k) == -1, "invariant");
  _klass_list->append(k);
}

size_t JfrArtifactSet::total_count() const {
  return _total_count;
}
| {
"pile_set_name": "Github"
} |
goog.module('nested.exported.enums');

// Exports object containing a nested enum plus one extra scalar property.
/** @const */
exports = {
  /** @const @enum {string} */
  A: {
    A1: 'a1',
  },
  // The structure of the AST changes if this extra property is present.
  B: 0,
};
"pile_set_name": "Github"
} |
/* Copyright 2019 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
import {Polygon} from '/lib/math/polygon2d.js';
import * as moduleInterface from '/lib/module_interface.js';
import * as moduleTicker from '/client/modules/module_ticker.js';
import * as network from '/client/network/network.js';
import * as peerNetwork from '/client/network/peer.js';
import {easyLog} from '/lib/log.js';
import assert from '/lib/assert.js';
import asset from '/client/asset/asset.js';
import conform from '/lib/conform.js';
import inject from '/lib/inject.js';
import * as stateManager from '/client/state/state_manager.js';
import {TitleCard} from '/client/title_card.js';
import * as time from '/client/util/time.js';
import {delay} from '/lib/promise.js';
// Builds a fresh DOM container for a module's content. The id is derived
// from the current wall time so each instantiation gets a unique element,
// and the module name is recorded as an attribute for debugging.
function createNewContainer(name) {
  const container = document.createElement('div');
  container.className = 'container';
  container.id = 't-' + time.now();
  container.setAttribute('moduleName', name);
  return container;
}
// Cross-fade transition between modules. start() prepares the incoming
// container (nearly transparent so it can fade in); perform() animates
// opacity over the time remaining until 'deadline'.
export const FadeTransition = {
  start(container) {
    if (container) {
      // 0.001 rather than 0 — presumably to keep the element rendered;
      // NOTE(review): confirm intent before normalizing.
      container.style.opacity = 0.001;
      document.querySelector('#containers').appendChild(container);
    }
  },
  async perform(oldModule, newModule, deadline) {
    if (newModule.name == '_empty') {
      // Fading out.. so fade *out* the *old* container.
      oldModule.container.style.transition =
          'opacity ' + time.until(deadline).toFixed(0) + 'ms';
      oldModule.container.style.opacity = 0.0;
    } else {
      newModule.container.style.transition =
          'opacity ' + time.until(deadline).toFixed(0) + 'ms';
      newModule.container.style.opacity = 1.0;
    }
    // TODO(applmak): Maybe wait until css says that the transition is done?
    await delay(time.until(deadline));
  }
}
// Client-side handle for one scheduled module: loads its code, wires up
// per-instantiation network/state, and drives the show/hide lifecycle
// (instantiate -> willBeShownSoon -> beginTransitionIn -> ... -> dispose).
export class ClientModule {
  constructor(name, path, config, titleCard, deadline, geo, transition) {
    // The module name.
    this.name = name;
    // The path to the main file of this module.
    this.path = path;
    // The module config.
    this.config = config;
    // The title card instance for this module.
    this.titleCard = titleCard;
    // Absolute time when this module is supposed to be visible. Module will
    // actually be faded in by deadline + 5000ms.
    this.deadline = deadline;
    // The wall geometry.
    this.geo = geo;
    // The transition to use to transition to this module.
    this.transition = transition;
    // The dom container for the module's content.
    this.container = null;
    // Module class instance.
    this.instance = null;
    // Network instance for this module.
    this.network = null;
  }
  // Deserializes from the json serialized form of ModuleDef in the server.
  static deserialize(bits) {
    if (bits.module.name == '_empty') {
      return ClientModule.newEmptyModule(bits.time);
    }
    return new ClientModule(
        bits.module.name,
        bits.module.path,
        bits.module.config,
        new TitleCard(bits.module.credit),
        bits.time,
        new Polygon(bits.geo),
        FadeTransition,
    );
  }
  // A placeholder module used when nothing is scheduled; its empty path
  // makes every lifecycle method below a no-op.
  static newEmptyModule(deadline = 0, transition = FadeTransition) {
    return new ClientModule(
        '_empty',
        '',
        {},
        new TitleCard({}),
        deadline,
        new Polygon([{x: 0, y:0}]),
        transition
    );
  }
  // Extracted out for testing purposes.
  static async loadPath(path) {
    return await import(path);
  }
  // Dynamically imports the module code, injects the sandboxed environment
  // (network, state, assets, geometry) into its exported 'load' function and
  // constructs the client instance. On failure the network is torn down
  // before the error propagates.
  async instantiate() {
    this.container = createNewContainer(this.name);
    if (!this.path) {
      return;
    }
    const INSTANTIATION_ID =
        `${this.geo.extents.serialize()}-${this.deadline}`;
    this.network = network.forModule(INSTANTIATION_ID);
    let openNetwork = this.network.open();
    this.stateManager = stateManager.forModule(network, INSTANTIATION_ID);
    const fakeEnv = {
      asset,
      debug: easyLog('wall:module:' + this.name),
      game: undefined,
      network: openNetwork,
      titleCard: this.titleCard.getModuleAPI(),
      state: this.stateManager.open(),
      wallGeometry: this.geo,
      peerNetwork,
      assert,
    };
    try {
      const {load} = await ClientModule.loadPath(this.path);
      if (!load) {
        throw new Error(`${this.name} did not export a 'load' function!`);
      }
      const {client} = inject(load, fakeEnv);
      conform(client, moduleInterface.Client);
      this.instance = new client(this.config);
    } catch (e) {
      // something went very wrong. Wind everything down.!
      this.network.close();
      this.network = null;
      throw e;
    }
  }
  // Returns true if module is still OK.
  async willBeShownSoon() {
    if (!this.path) {
      return;
    }
    // Prep the container for transition.
    // TODO(applmak): Move the transition smarts out of ClientModule.
    this.transition.start(this.container);
    try {
      await this.instance.willBeShownSoon(this.container, this.deadline);
    } catch(e) {
      this.dispose();
      throw e;
    }
  }
  // Returns true if module is still OK.
  beginTransitionIn(deadline) {
    if (!this.path) {
      return;
    }
    // Start ticking the instance before it becomes visible.
    moduleTicker.add(this.name, this.instance);
    try {
      this.instance.beginFadeIn(deadline);
    } catch (e) {
      this.dispose();
      throw e;
    }
  }
  finishTransitionIn() {
    if (!this.path) {
      return;
    }
    this.titleCard.enter();
    this.instance.finishFadeIn();
  }
  beginTransitionOut(deadline) {
    if (!this.path) {
      return;
    }
    this.titleCard.exit();
    this.instance.beginFadeOut(deadline);
  }
  finishTransitionOut() {
    if (!this.path) {
      return;
    }
    this.instance.finishFadeOut();
  }
  async performTransition(otherModule, transitionFinishDeadline) {
    await this.transition.perform(otherModule, this, transitionFinishDeadline);
  }
  // Tears down DOM, ticker, state and network resources. Safe to call more
  // than once and on modules that never finished instantiating.
  dispose() {
    if (this.container) {
      this.container.remove();
      this.container = null;
    }
    if (!this.path) {
      return;
    }
    this.titleCard.exit(); // Just in case.
    moduleTicker.remove(this.instance);
    if (this.network) {
      this.stateManager.close();
      this.stateManager = null;
      this.network.close();
      this.network = null;
    }
  }
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Data.Entity.TestModels.ProviderAgnosticModel
{
    using System;

    /// <summary>
    /// Enum used to exercise enum-typed properties in provider-agnostic tests.
    /// </summary>
    public enum AllTypesEnum
    {
        EnumValue0 = 0,
        EnumValue1 = 1,
        EnumValue2 = 2,
        EnumValue3 = 3,
    };

    /// <summary>
    /// Test entity exposing one property per supported primitive/store type
    /// (numeric, string, binary, temporal, guid and enum variants).
    /// </summary>
    public class AllTypes
    {
        // Primary key.
        public int Id { get; set; }
        public bool BooleanProperty { get; set; }
        public byte ByteProperty { get; set; }
        public DateTime DateTimeProperty { get; set; }
        public decimal DecimalProperty { get; set; }
        public double DoubleProperty { get; set; }
        public byte[] FixedLengthBinaryProperty { get; set; }
        public string FixedLengthStringProperty { get; set; }
        public string FixedLengthUnicodeStringProperty { get; set; }
        public float FloatProperty { get; set; }
        public Guid GuidProperty { get; set; }
        public short Int16Property { get; set; }
        public int Int32Property { get; set; }
        public long Int64Property { get; set; }
        public byte[] MaxLengthBinaryProperty { get; set; }
        public string MaxLengthStringProperty { get; set; }
        public string MaxLengthUnicodeStringProperty { get; set; }
        public TimeSpan TimeSpanProperty { get; set; }
        public string VariableLengthStringProperty { get; set; }
        public byte[] VariableLengthBinaryProperty { get; set; }
        public string VariableLengthUnicodeStringProperty { get; set; }
        public AllTypesEnum EnumProperty { get; set; }
    }
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* Specialized implementation of hook_page_manager_task_tasks(). See api-task.html for
* more information.
*/
/**
 * Describes the 'contact_user' task to page manager.
 *
 * Returns nothing when the contact module is absent, which removes the task
 * entirely. Otherwise returns the task definition array consumed by the
 * page manager task registry.
 */
function page_manager_contact_user_page_manager_tasks() {
  // The task is only meaningful when core's contact module is enabled.
  if (!module_exists('contact')) {
    return;
  }
  return array(
    // This is a 'page' task and will fall under the page admin UI
    'task type' => 'page',
    'title' => t('User contact'),
    'admin title' => t('User contact'),
    'admin description' => t('When enabled, this overrides the default Drupal behavior for displaying the user contact form at <em>user/%user/contact</em>. If no variant is selected, the default Drupal user contact form will be used.'),
    'admin path' => 'user/%user/contact',
    // Callback to add items to the page managertask administration form:
    'task admin' => 'page_manager_contact_user_task_admin',
    'hook menu alter' => 'page_manager_contact_user_menu_alter',
    // This is task uses 'context' handlers and must implement these to give the
    // handler data it needs.
    'handler type' => 'context', // handler type -- misnamed
    'get arguments' => 'page_manager_contact_user_get_arguments',
    'get context placeholders' => 'page_manager_contact_user_get_contexts',
    // Allow this to be enabled or disabled:
    'disabled' => variable_get('page_manager_contact_user_disabled', TRUE),
    'enable callback' => 'page_manager_contact_user_enable',
  );
}
/**
* Callback defined by page_manager_contact_user_page_manager_tasks().
*
* Alter the user view input so that user view comes to us rather than the
* normal user view process.
*/
function page_manager_contact_user_menu_alter(&$items, $task) {
  // Do nothing while the task is disabled.
  if (variable_get('page_manager_contact_user_disabled', TRUE)) {
    return;
  }
  // Override the user view handler for our purpose. Only take over the router
  // item if it still points at core's contact_user_page (or the site opted in
  // to overriding regardless); otherwise another module owns the path.
  if ($items['user/%user/contact']['page callback'] == 'contact_user_page' || variable_get('page_manager_override_anyway', FALSE)) {
    $items['user/%user/contact']['page callback'] = 'page_manager_contact_user';
    $items['user/%user/contact']['file path'] = $task['path'];
    $items['user/%user/contact']['file'] = $task['file'];
  }
  else {
    // automatically disable this task if it cannot be enabled.
    variable_set('page_manager_contact_user_disabled', TRUE);
    // Only complain when the admin just tried to enable the task (flag set in
    // page_manager_contact_user_enable()).
    if (!empty($GLOBALS['page_manager_enabling_contact_user'])) {
      drupal_set_message(t('Page manager module is unable to enable user/%user/contact because some other module already has overridden with %callback.', array('%callback' => $items['user/%user/contact']['page callback'])), 'error');
    }
  }
}
/**
* Entry point for our overridden user view.
*
* This function asks its assigned handlers who, if anyone, would like
* to run with it. If no one does, it passes through to Drupal core's
* user view, which is user_page_view().
*/
function page_manager_contact_user($account) {
  // Load my task plugin:
  $task = page_manager_get_task('contact_user');

  // Load the account into a context.
  ctools_include('context');
  ctools_include('context-task-handler');
  $contexts = ctools_context_handler_get_task_contexts($task, '', array($account));

  // Let an assigned handler render the page; FALSE means none claimed it.
  $output = ctools_context_handler_render($task, '', $contexts, array($account->uid));
  if ($output !== FALSE) {
    return $output;
  }

  // No handler claimed the page: fall back to core's contact form, unless
  // another module supplies its own fallback via hook_page_manager_override().
  module_load_include('inc', 'contact', 'contact.pages');
  $function = 'contact_user_page';
  foreach (module_implements('page_manager_override') as $module) {
    $call = $module . '_page_manager_override';
    if (($rc = $call('contact_user')) && function_exists($rc)) {
      $function = $rc;
      break;
    }
  }

  // Otherwise, fall back.
  return $function($account);
}
/**
* Callback to get arguments provided by this task handler.
*
* Since this is the node view and there is no UI on the arguments, we
* create dummy arguments that contain the needed data.
*/
function page_manager_contact_user_get_arguments($task, $subtask_id) {
  // A single dummy 'uid' argument: there is no argument UI for this task, so
  // the needed data is declared directly here.
  return array(
    array(
      'keyword' => 'user',
      'identifier' => t('User being viewed'),
      'id' => 1,
      'name' => 'uid',
      'settings' => array(),
    ),
  );
}
/**
 * Callback to get context placeholders provided by this handler.
 *
 * Converts the dummy argument definitions from
 * page_manager_contact_user_get_arguments() into placeholder contexts.
 */
function page_manager_contact_user_get_contexts($task, $subtask_id) {
  $arguments = page_manager_contact_user_get_arguments($task, $subtask_id);
  return ctools_context_get_placeholders_from_argument($arguments);
}
/**
 * Callback to enable/disable the page from the UI.
 *
 * Note: $status is stored directly into the '..._disabled' variable, so
 * TRUE means the page is disabled and FALSE means it is being enabled.
 */
function page_manager_contact_user_enable($cache, $status) {
variable_set('page_manager_contact_user_disabled', $status);
// Set a global flag so that the menu routine knows it needs
// to set a message if enabling cannot be done.
if (!$status) {
$GLOBALS['page_manager_enabling_contact_user'] = TRUE;
}
}
| {
"pile_set_name": "Github"
} |
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": "./index.html",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}
| {
"pile_set_name": "Github"
} |
---
layout: example
title: Wheat Plot Example
permalink: /examples/wheat-plot/index.html
spec: wheat-plot
image: /examples/img/wheat-plot.png
---
A [wheat plot](http://www.perceptualedge.com/articles/visual_business_intelligence/the_datavis_jitterbug.pdf) is an alternative to standard dot plots and histograms that incorporates aspects of both. The x-coordinate of a point is based on its exact value. The y-coordinate is determined by grouping points into histogram bins, then stacking them based on their rank order within each bin. While not scalable to large numbers of data points, wheat plots allow inspection of (and interaction with) individual points without overplotting. For a related approach, see [beeswarm plots](../beeswarm-plot/).
{% include example spec=page.spec %}
| {
"pile_set_name": "Github"
} |
Size:
- 0.1
- 0.1
- 0.1
Color:
- 0.66
- 0.70220774
- 0.94
- 1
Body: Animated
Pose:
- - -0.41426134
- 0.9058533
- -8.841649e-2
- 1.6415431
- - 0.6057532
- 0.34691048
- 0.71604204
- 4.429285
- - 0.6793016
- 0.24306992
- -0.69243515
- 8.778018
- - 0.0
- 0.0
- 0.0
- 1
Shape: Cube
| {
"pile_set_name": "Github"
} |
// DO NOT EDIT.
//
// Generated by the Swift generator plugin for the protocol buffer compiler.
// Source: google/protobuf/unittest_proto3_arena.proto
//
// For information on using the generated types, please see the documentation:
// https://github.com/apple/swift-protobuf/
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import Foundation
import SwiftProtobuf
// If the compiler emits an error on this type, it is because this file
// was generated by a version of the `protoc` Swift plug-in that is
// incompatible with the version of SwiftProtobuf to which you are linking.
// Please ensure that you are building against the same version of the API
// that was used to generate this file.
// Compile-time check that this generated code and the linked SwiftProtobuf
// runtime agree on protobuf API version 2.
fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck {
struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {}
typealias Version = _2
}
/// Generated enum for proto3_arena_unittest.ForeignEnum. Unknown wire
/// values are preserved in the UNRECOGNIZED case rather than dropped.
enum Proto3ArenaUnittest_ForeignEnum: SwiftProtobuf.Enum {
typealias RawValue = Int
case foreignZero // = 0
case foreignFoo // = 4
case foreignBar // = 5
case foreignBaz // = 6
case UNRECOGNIZED(Int)
/// Proto3 default: the zero-valued case.
init() {
self = .foreignZero
}
// Never actually fails: unmatched raw values map to .UNRECOGNIZED.
init?(rawValue: Int) {
switch rawValue {
case 0: self = .foreignZero
case 4: self = .foreignFoo
case 5: self = .foreignBar
case 6: self = .foreignBaz
default: self = .UNRECOGNIZED(rawValue)
}
}
var rawValue: Int {
switch self {
case .foreignZero: return 0
case .foreignFoo: return 4
case .foreignBar: return 5
case .foreignBaz: return 6
case .UNRECOGNIZED(let i): return i
}
}
}
#if swift(>=4.2)
extension Proto3ArenaUnittest_ForeignEnum: CaseIterable {
// The compiler won't synthesize support with the UNRECOGNIZED case.
// allCases deliberately omits .UNRECOGNIZED, which carries an associated
// value and stands for arbitrarily many raw values.
static var allCases: [Proto3ArenaUnittest_ForeignEnum] = [
.foreignZero,
.foreignFoo,
.foreignBar,
.foreignBaz,
]
}
#endif // swift(>=4.2)
/// This proto includes every type of field in both singular and repeated
/// forms.
struct Proto3ArenaUnittest_TestAllTypes {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
// NOTE(review): All stored fields live in the reference-typed _StorageClass
// declared in the Message extension; every setter goes through
// _uniqueStorage(), which clones shared storage before mutation.
/// Singular
var optionalInt32: Int32 {
get {return _storage._optionalInt32}
set {_uniqueStorage()._optionalInt32 = newValue}
}
var optionalInt64: Int64 {
get {return _storage._optionalInt64}
set {_uniqueStorage()._optionalInt64 = newValue}
}
var optionalUint32: UInt32 {
get {return _storage._optionalUint32}
set {_uniqueStorage()._optionalUint32 = newValue}
}
var optionalUint64: UInt64 {
get {return _storage._optionalUint64}
set {_uniqueStorage()._optionalUint64 = newValue}
}
var optionalSint32: Int32 {
get {return _storage._optionalSint32}
set {_uniqueStorage()._optionalSint32 = newValue}
}
var optionalSint64: Int64 {
get {return _storage._optionalSint64}
set {_uniqueStorage()._optionalSint64 = newValue}
}
var optionalFixed32: UInt32 {
get {return _storage._optionalFixed32}
set {_uniqueStorage()._optionalFixed32 = newValue}
}
var optionalFixed64: UInt64 {
get {return _storage._optionalFixed64}
set {_uniqueStorage()._optionalFixed64 = newValue}
}
var optionalSfixed32: Int32 {
get {return _storage._optionalSfixed32}
set {_uniqueStorage()._optionalSfixed32 = newValue}
}
var optionalSfixed64: Int64 {
get {return _storage._optionalSfixed64}
set {_uniqueStorage()._optionalSfixed64 = newValue}
}
var optionalFloat: Float {
get {return _storage._optionalFloat}
set {_uniqueStorage()._optionalFloat = newValue}
}
var optionalDouble: Double {
get {return _storage._optionalDouble}
set {_uniqueStorage()._optionalDouble = newValue}
}
var optionalBool: Bool {
get {return _storage._optionalBool}
set {_uniqueStorage()._optionalBool = newValue}
}
var optionalString: String {
get {return _storage._optionalString}
set {_uniqueStorage()._optionalString = newValue}
}
var optionalBytes: Data {
get {return _storage._optionalBytes}
set {_uniqueStorage()._optionalBytes = newValue}
}
var optionalNestedMessage: Proto3ArenaUnittest_TestAllTypes.NestedMessage {
get {return _storage._optionalNestedMessage ?? Proto3ArenaUnittest_TestAllTypes.NestedMessage()}
set {_uniqueStorage()._optionalNestedMessage = newValue}
}
/// Returns true if `optionalNestedMessage` has been explicitly set.
var hasOptionalNestedMessage: Bool {return _storage._optionalNestedMessage != nil}
/// Clears the value of `optionalNestedMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalNestedMessage() {_uniqueStorage()._optionalNestedMessage = nil}
var optionalForeignMessage: Proto3ArenaUnittest_ForeignMessage {
get {return _storage._optionalForeignMessage ?? Proto3ArenaUnittest_ForeignMessage()}
set {_uniqueStorage()._optionalForeignMessage = newValue}
}
/// Returns true if `optionalForeignMessage` has been explicitly set.
var hasOptionalForeignMessage: Bool {return _storage._optionalForeignMessage != nil}
/// Clears the value of `optionalForeignMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalForeignMessage() {_uniqueStorage()._optionalForeignMessage = nil}
var optionalImportMessage: ProtobufUnittestImport_ImportMessage {
get {return _storage._optionalImportMessage ?? ProtobufUnittestImport_ImportMessage()}
set {_uniqueStorage()._optionalImportMessage = newValue}
}
/// Returns true if `optionalImportMessage` has been explicitly set.
var hasOptionalImportMessage: Bool {return _storage._optionalImportMessage != nil}
/// Clears the value of `optionalImportMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalImportMessage() {_uniqueStorage()._optionalImportMessage = nil}
var optionalNestedEnum: Proto3ArenaUnittest_TestAllTypes.NestedEnum {
get {return _storage._optionalNestedEnum}
set {_uniqueStorage()._optionalNestedEnum = newValue}
}
var optionalForeignEnum: Proto3ArenaUnittest_ForeignEnum {
get {return _storage._optionalForeignEnum}
set {_uniqueStorage()._optionalForeignEnum = newValue}
}
var optionalStringPiece: String {
get {return _storage._optionalStringPiece}
set {_uniqueStorage()._optionalStringPiece = newValue}
}
var optionalCord: String {
get {return _storage._optionalCord}
set {_uniqueStorage()._optionalCord = newValue}
}
/// Defined in unittest_import_public.proto
var optionalPublicImportMessage: ProtobufUnittestImport_PublicImportMessage {
get {return _storage._optionalPublicImportMessage ?? ProtobufUnittestImport_PublicImportMessage()}
set {_uniqueStorage()._optionalPublicImportMessage = newValue}
}
/// Returns true if `optionalPublicImportMessage` has been explicitly set.
var hasOptionalPublicImportMessage: Bool {return _storage._optionalPublicImportMessage != nil}
/// Clears the value of `optionalPublicImportMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalPublicImportMessage() {_uniqueStorage()._optionalPublicImportMessage = nil}
var optionalLazyMessage: Proto3ArenaUnittest_TestAllTypes.NestedMessage {
get {return _storage._optionalLazyMessage ?? Proto3ArenaUnittest_TestAllTypes.NestedMessage()}
set {_uniqueStorage()._optionalLazyMessage = newValue}
}
/// Returns true if `optionalLazyMessage` has been explicitly set.
var hasOptionalLazyMessage: Bool {return _storage._optionalLazyMessage != nil}
/// Clears the value of `optionalLazyMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalLazyMessage() {_uniqueStorage()._optionalLazyMessage = nil}
var optionalLazyImportMessage: ProtobufUnittestImport_ImportMessage {
get {return _storage._optionalLazyImportMessage ?? ProtobufUnittestImport_ImportMessage()}
set {_uniqueStorage()._optionalLazyImportMessage = newValue}
}
/// Returns true if `optionalLazyImportMessage` has been explicitly set.
var hasOptionalLazyImportMessage: Bool {return _storage._optionalLazyImportMessage != nil}
/// Clears the value of `optionalLazyImportMessage`. Subsequent reads from it will return its default value.
mutating func clearOptionalLazyImportMessage() {_uniqueStorage()._optionalLazyImportMessage = nil}
/// Repeated
var repeatedInt32: [Int32] {
get {return _storage._repeatedInt32}
set {_uniqueStorage()._repeatedInt32 = newValue}
}
var repeatedInt64: [Int64] {
get {return _storage._repeatedInt64}
set {_uniqueStorage()._repeatedInt64 = newValue}
}
var repeatedUint32: [UInt32] {
get {return _storage._repeatedUint32}
set {_uniqueStorage()._repeatedUint32 = newValue}
}
var repeatedUint64: [UInt64] {
get {return _storage._repeatedUint64}
set {_uniqueStorage()._repeatedUint64 = newValue}
}
var repeatedSint32: [Int32] {
get {return _storage._repeatedSint32}
set {_uniqueStorage()._repeatedSint32 = newValue}
}
var repeatedSint64: [Int64] {
get {return _storage._repeatedSint64}
set {_uniqueStorage()._repeatedSint64 = newValue}
}
var repeatedFixed32: [UInt32] {
get {return _storage._repeatedFixed32}
set {_uniqueStorage()._repeatedFixed32 = newValue}
}
var repeatedFixed64: [UInt64] {
get {return _storage._repeatedFixed64}
set {_uniqueStorage()._repeatedFixed64 = newValue}
}
var repeatedSfixed32: [Int32] {
get {return _storage._repeatedSfixed32}
set {_uniqueStorage()._repeatedSfixed32 = newValue}
}
var repeatedSfixed64: [Int64] {
get {return _storage._repeatedSfixed64}
set {_uniqueStorage()._repeatedSfixed64 = newValue}
}
var repeatedFloat: [Float] {
get {return _storage._repeatedFloat}
set {_uniqueStorage()._repeatedFloat = newValue}
}
var repeatedDouble: [Double] {
get {return _storage._repeatedDouble}
set {_uniqueStorage()._repeatedDouble = newValue}
}
var repeatedBool: [Bool] {
get {return _storage._repeatedBool}
set {_uniqueStorage()._repeatedBool = newValue}
}
var repeatedString: [String] {
get {return _storage._repeatedString}
set {_uniqueStorage()._repeatedString = newValue}
}
var repeatedBytes: [Data] {
get {return _storage._repeatedBytes}
set {_uniqueStorage()._repeatedBytes = newValue}
}
var repeatedNestedMessage: [Proto3ArenaUnittest_TestAllTypes.NestedMessage] {
get {return _storage._repeatedNestedMessage}
set {_uniqueStorage()._repeatedNestedMessage = newValue}
}
var repeatedForeignMessage: [Proto3ArenaUnittest_ForeignMessage] {
get {return _storage._repeatedForeignMessage}
set {_uniqueStorage()._repeatedForeignMessage = newValue}
}
var repeatedImportMessage: [ProtobufUnittestImport_ImportMessage] {
get {return _storage._repeatedImportMessage}
set {_uniqueStorage()._repeatedImportMessage = newValue}
}
var repeatedNestedEnum: [Proto3ArenaUnittest_TestAllTypes.NestedEnum] {
get {return _storage._repeatedNestedEnum}
set {_uniqueStorage()._repeatedNestedEnum = newValue}
}
var repeatedForeignEnum: [Proto3ArenaUnittest_ForeignEnum] {
get {return _storage._repeatedForeignEnum}
set {_uniqueStorage()._repeatedForeignEnum = newValue}
}
var repeatedStringPiece: [String] {
get {return _storage._repeatedStringPiece}
set {_uniqueStorage()._repeatedStringPiece = newValue}
}
var repeatedCord: [String] {
get {return _storage._repeatedCord}
set {_uniqueStorage()._repeatedCord = newValue}
}
var repeatedLazyMessage: [Proto3ArenaUnittest_TestAllTypes.NestedMessage] {
get {return _storage._repeatedLazyMessage}
set {_uniqueStorage()._repeatedLazyMessage = newValue}
}
// At most one member of the oneof_field group is set at any time; the
// typed accessors below read/write through this single stored case.
var oneofField: OneOf_OneofField? {
get {return _storage._oneofField}
set {_uniqueStorage()._oneofField = newValue}
}
var oneofUint32: UInt32 {
get {
if case .oneofUint32(let v)? = _storage._oneofField {return v}
return 0
}
set {_uniqueStorage()._oneofField = .oneofUint32(newValue)}
}
var oneofNestedMessage: Proto3ArenaUnittest_TestAllTypes.NestedMessage {
get {
if case .oneofNestedMessage(let v)? = _storage._oneofField {return v}
return Proto3ArenaUnittest_TestAllTypes.NestedMessage()
}
set {_uniqueStorage()._oneofField = .oneofNestedMessage(newValue)}
}
var oneofString: String {
get {
if case .oneofString(let v)? = _storage._oneofField {return v}
return String()
}
set {_uniqueStorage()._oneofField = .oneofString(newValue)}
}
var oneofBytes: Data {
get {
if case .oneofBytes(let v)? = _storage._oneofField {return v}
return SwiftProtobuf.Internal.emptyData
}
set {_uniqueStorage()._oneofField = .oneofBytes(newValue)}
}
var unknownFields = SwiftProtobuf.UnknownStorage()
/// Cases for the oneof_field group: the message holds nil or exactly one
/// of these at runtime.
enum OneOf_OneofField: Equatable {
case oneofUint32(UInt32)
case oneofNestedMessage(Proto3ArenaUnittest_TestAllTypes.NestedMessage)
case oneofString(String)
case oneofBytes(Data)
#if !swift(>=4.1)
// Hand-written == for Swift < 4.1, which cannot synthesize Equatable
// for enums with associated values.
static func ==(lhs: Proto3ArenaUnittest_TestAllTypes.OneOf_OneofField, rhs: Proto3ArenaUnittest_TestAllTypes.OneOf_OneofField) -> Bool {
switch (lhs, rhs) {
case (.oneofUint32(let l), .oneofUint32(let r)): return l == r
case (.oneofNestedMessage(let l), .oneofNestedMessage(let r)): return l == r
case (.oneofString(let l), .oneofString(let r)): return l == r
case (.oneofBytes(let l), .oneofBytes(let r)): return l == r
default: return false
}
}
#endif
}
enum NestedEnum: SwiftProtobuf.Enum {
typealias RawValue = Int
case zero // = 0
case foo // = 1
case bar // = 2
case baz // = 3
/// Intentionally negative.
case neg // = -1
case UNRECOGNIZED(Int)
/// Proto3 default: the zero-valued case.
init() {
self = .zero
}
// Never actually fails: unmatched raw values map to .UNRECOGNIZED.
init?(rawValue: Int) {
switch rawValue {
case -1: self = .neg
case 0: self = .zero
case 1: self = .foo
case 2: self = .bar
case 3: self = .baz
default: self = .UNRECOGNIZED(rawValue)
}
}
var rawValue: Int {
switch self {
case .neg: return -1
case .zero: return 0
case .foo: return 1
case .bar: return 2
case .baz: return 3
case .UNRECOGNIZED(let i): return i
}
}
}
struct NestedMessage {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// The field name "b" fails to compile in proto1 because it conflicts with
/// a local variable named "b" in one of the generated methods. Doh.
/// This file needs to compile in proto1 to test backwards-compatibility.
var bb: Int32 = 0
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
init() {}
// Shared default storage; replaced by a private copy on first mutation
// via _uniqueStorage().
fileprivate var _storage = _StorageClass.defaultInstance
}
#if swift(>=4.2)
extension Proto3ArenaUnittest_TestAllTypes.NestedEnum: CaseIterable {
// The compiler won't synthesize support with the UNRECOGNIZED case.
// allCases deliberately omits .UNRECOGNIZED, which carries an associated
// value.
static var allCases: [Proto3ArenaUnittest_TestAllTypes.NestedEnum] = [
.zero,
.foo,
.bar,
.baz,
.neg,
]
}
#endif // swift(>=4.2)
/// Test message exercising repeated scalar fields named "packed_*".
struct Proto3ArenaUnittest_TestPackedTypes {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var packedInt32: [Int32] = []
var packedInt64: [Int64] = []
var packedUint32: [UInt32] = []
var packedUint64: [UInt64] = []
var packedSint32: [Int32] = []
var packedSint64: [Int64] = []
var packedFixed32: [UInt32] = []
var packedFixed64: [UInt64] = []
var packedSfixed32: [Int32] = []
var packedSfixed64: [Int64] = []
var packedFloat: [Float] = []
var packedDouble: [Double] = []
var packedBool: [Bool] = []
var packedEnum: [Proto3ArenaUnittest_ForeignEnum] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Explicitly set packed to false
/// (same repeated scalar fields as above, but with packed encoding off).
struct Proto3ArenaUnittest_TestUnpackedTypes {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var repeatedInt32: [Int32] = []
var repeatedInt64: [Int64] = []
var repeatedUint32: [UInt32] = []
var repeatedUint64: [UInt64] = []
var repeatedSint32: [Int32] = []
var repeatedSint64: [Int64] = []
var repeatedFixed32: [UInt32] = []
var repeatedFixed64: [UInt64] = []
var repeatedSfixed32: [Int32] = []
var repeatedSfixed64: [Int64] = []
var repeatedFloat: [Float] = []
var repeatedDouble: [Double] = []
var repeatedBool: [Bool] = []
var repeatedNestedEnum: [Proto3ArenaUnittest_TestAllTypes.NestedEnum] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// This proto includes a recursively nested message.
struct Proto3ArenaUnittest_NestedTestAllTypes {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var child: Proto3ArenaUnittest_NestedTestAllTypes {
get {return _storage._child ?? Proto3ArenaUnittest_NestedTestAllTypes()}
set {_uniqueStorage()._child = newValue}
}
/// Returns true if `child` has been explicitly set.
var hasChild: Bool {return _storage._child != nil}
/// Clears the value of `child`. Subsequent reads from it will return its default value.
mutating func clearChild() {_uniqueStorage()._child = nil}
var payload: Proto3ArenaUnittest_TestAllTypes {
get {return _storage._payload ?? Proto3ArenaUnittest_TestAllTypes()}
set {_uniqueStorage()._payload = newValue}
}
/// Returns true if `payload` has been explicitly set.
var hasPayload: Bool {return _storage._payload != nil}
/// Clears the value of `payload`. Subsequent reads from it will return its default value.
mutating func clearPayload() {_uniqueStorage()._payload = nil}
var repeatedChild: [Proto3ArenaUnittest_NestedTestAllTypes] {
get {return _storage._repeatedChild}
set {_uniqueStorage()._repeatedChild = newValue}
}
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
// Reference storage also breaks the recursion (a struct cannot directly
// contain a stored value of its own type).
fileprivate var _storage = _StorageClass.defaultInstance
}
/// Define these after TestAllTypes to make sure the compiler can handle
/// that.
struct Proto3ArenaUnittest_ForeignMessage {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
// Single int32 field "c"; proto3 default is 0.
var c: Int32 = 0
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// TestEmptyMessage is used to test behavior of unknown fields.
struct Proto3ArenaUnittest_TestEmptyMessage {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
// No declared fields: any decoded content lands in unknownFields.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
// MARK: - Code below here is support for the SwiftProtobuf runtime.
// Proto package name, prefixed onto each protoMessageName below.
fileprivate let _protobuf_package = "proto3_arena_unittest"
extension Proto3ArenaUnittest_ForeignEnum: SwiftProtobuf._ProtoNameProviding {
// Maps raw enum values back to their original .proto case names for the
// SwiftProtobuf runtime.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "FOREIGN_ZERO"),
4: .same(proto: "FOREIGN_FOO"),
5: .same(proto: "FOREIGN_BAR"),
6: .same(proto: "FOREIGN_BAZ"),
]
}
extension Proto3ArenaUnittest_TestAllTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name: "proto3_arena_unittest.TestAllTypes".
static let protoMessageName: String = _protobuf_package + ".TestAllTypes"
// Maps field numbers to their .proto field names for the runtime.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "optional_int32"),
2: .standard(proto: "optional_int64"),
3: .standard(proto: "optional_uint32"),
4: .standard(proto: "optional_uint64"),
5: .standard(proto: "optional_sint32"),
6: .standard(proto: "optional_sint64"),
7: .standard(proto: "optional_fixed32"),
8: .standard(proto: "optional_fixed64"),
9: .standard(proto: "optional_sfixed32"),
10: .standard(proto: "optional_sfixed64"),
11: .standard(proto: "optional_float"),
12: .standard(proto: "optional_double"),
13: .standard(proto: "optional_bool"),
14: .standard(proto: "optional_string"),
15: .standard(proto: "optional_bytes"),
18: .standard(proto: "optional_nested_message"),
19: .standard(proto: "optional_foreign_message"),
20: .standard(proto: "optional_import_message"),
21: .standard(proto: "optional_nested_enum"),
22: .standard(proto: "optional_foreign_enum"),
24: .standard(proto: "optional_string_piece"),
25: .standard(proto: "optional_cord"),
26: .standard(proto: "optional_public_import_message"),
27: .standard(proto: "optional_lazy_message"),
// NOTE(review): 115 appears out of numeric order here; harmless, the
// dictionary is keyed by field number.
115: .standard(proto: "optional_lazy_import_message"),
31: .standard(proto: "repeated_int32"),
32: .standard(proto: "repeated_int64"),
33: .standard(proto: "repeated_uint32"),
34: .standard(proto: "repeated_uint64"),
35: .standard(proto: "repeated_sint32"),
36: .standard(proto: "repeated_sint64"),
37: .standard(proto: "repeated_fixed32"),
38: .standard(proto: "repeated_fixed64"),
39: .standard(proto: "repeated_sfixed32"),
40: .standard(proto: "repeated_sfixed64"),
41: .standard(proto: "repeated_float"),
42: .standard(proto: "repeated_double"),
43: .standard(proto: "repeated_bool"),
44: .standard(proto: "repeated_string"),
45: .standard(proto: "repeated_bytes"),
48: .standard(proto: "repeated_nested_message"),
49: .standard(proto: "repeated_foreign_message"),
50: .standard(proto: "repeated_import_message"),
51: .standard(proto: "repeated_nested_enum"),
52: .standard(proto: "repeated_foreign_enum"),
54: .standard(proto: "repeated_string_piece"),
55: .standard(proto: "repeated_cord"),
57: .standard(proto: "repeated_lazy_message"),
111: .standard(proto: "oneof_uint32"),
112: .standard(proto: "oneof_nested_message"),
113: .standard(proto: "oneof_string"),
114: .standard(proto: "oneof_bytes"),
]
// Reference-typed backing storage for Proto3ArenaUnittest_TestAllTypes;
// struct copies share one instance until a setter triggers a copy via
// _uniqueStorage().
fileprivate class _StorageClass {
var _optionalInt32: Int32 = 0
var _optionalInt64: Int64 = 0
var _optionalUint32: UInt32 = 0
var _optionalUint64: UInt64 = 0
var _optionalSint32: Int32 = 0
var _optionalSint64: Int64 = 0
var _optionalFixed32: UInt32 = 0
var _optionalFixed64: UInt64 = 0
var _optionalSfixed32: Int32 = 0
var _optionalSfixed64: Int64 = 0
var _optionalFloat: Float = 0
var _optionalDouble: Double = 0
var _optionalBool: Bool = false
var _optionalString: String = String()
var _optionalBytes: Data = SwiftProtobuf.Internal.emptyData
var _optionalNestedMessage: Proto3ArenaUnittest_TestAllTypes.NestedMessage? = nil
var _optionalForeignMessage: Proto3ArenaUnittest_ForeignMessage? = nil
var _optionalImportMessage: ProtobufUnittestImport_ImportMessage? = nil
var _optionalNestedEnum: Proto3ArenaUnittest_TestAllTypes.NestedEnum = .zero
var _optionalForeignEnum: Proto3ArenaUnittest_ForeignEnum = .foreignZero
var _optionalStringPiece: String = String()
var _optionalCord: String = String()
var _optionalPublicImportMessage: ProtobufUnittestImport_PublicImportMessage? = nil
var _optionalLazyMessage: Proto3ArenaUnittest_TestAllTypes.NestedMessage? = nil
var _optionalLazyImportMessage: ProtobufUnittestImport_ImportMessage? = nil
var _repeatedInt32: [Int32] = []
var _repeatedInt64: [Int64] = []
var _repeatedUint32: [UInt32] = []
var _repeatedUint64: [UInt64] = []
var _repeatedSint32: [Int32] = []
var _repeatedSint64: [Int64] = []
var _repeatedFixed32: [UInt32] = []
var _repeatedFixed64: [UInt64] = []
var _repeatedSfixed32: [Int32] = []
var _repeatedSfixed64: [Int64] = []
var _repeatedFloat: [Float] = []
var _repeatedDouble: [Double] = []
var _repeatedBool: [Bool] = []
var _repeatedString: [String] = []
var _repeatedBytes: [Data] = []
var _repeatedNestedMessage: [Proto3ArenaUnittest_TestAllTypes.NestedMessage] = []
var _repeatedForeignMessage: [Proto3ArenaUnittest_ForeignMessage] = []
var _repeatedImportMessage: [ProtobufUnittestImport_ImportMessage] = []
var _repeatedNestedEnum: [Proto3ArenaUnittest_TestAllTypes.NestedEnum] = []
var _repeatedForeignEnum: [Proto3ArenaUnittest_ForeignEnum] = []
var _repeatedStringPiece: [String] = []
var _repeatedCord: [String] = []
var _repeatedLazyMessage: [Proto3ArenaUnittest_TestAllTypes.NestedMessage] = []
var _oneofField: Proto3ArenaUnittest_TestAllTypes.OneOf_OneofField?
// Storage shared by all default-initialized messages; setters copy it
// before the first mutation when the reference is shared.
static let defaultInstance = _StorageClass()
private init() {}
// Field-by-field copy used by _uniqueStorage() for copy-on-write.
init(copying source: _StorageClass) {
_optionalInt32 = source._optionalInt32
_optionalInt64 = source._optionalInt64
_optionalUint32 = source._optionalUint32
_optionalUint64 = source._optionalUint64
_optionalSint32 = source._optionalSint32
_optionalSint64 = source._optionalSint64
_optionalFixed32 = source._optionalFixed32
_optionalFixed64 = source._optionalFixed64
_optionalSfixed32 = source._optionalSfixed32
_optionalSfixed64 = source._optionalSfixed64
_optionalFloat = source._optionalFloat
_optionalDouble = source._optionalDouble
_optionalBool = source._optionalBool
_optionalString = source._optionalString
_optionalBytes = source._optionalBytes
_optionalNestedMessage = source._optionalNestedMessage
_optionalForeignMessage = source._optionalForeignMessage
_optionalImportMessage = source._optionalImportMessage
_optionalNestedEnum = source._optionalNestedEnum
_optionalForeignEnum = source._optionalForeignEnum
_optionalStringPiece = source._optionalStringPiece
_optionalCord = source._optionalCord
_optionalPublicImportMessage = source._optionalPublicImportMessage
_optionalLazyMessage = source._optionalLazyMessage
_optionalLazyImportMessage = source._optionalLazyImportMessage
_repeatedInt32 = source._repeatedInt32
_repeatedInt64 = source._repeatedInt64
_repeatedUint32 = source._repeatedUint32
_repeatedUint64 = source._repeatedUint64
_repeatedSint32 = source._repeatedSint32
_repeatedSint64 = source._repeatedSint64
_repeatedFixed32 = source._repeatedFixed32
_repeatedFixed64 = source._repeatedFixed64
_repeatedSfixed32 = source._repeatedSfixed32
_repeatedSfixed64 = source._repeatedSfixed64
_repeatedFloat = source._repeatedFloat
_repeatedDouble = source._repeatedDouble
_repeatedBool = source._repeatedBool
_repeatedString = source._repeatedString
_repeatedBytes = source._repeatedBytes
_repeatedNestedMessage = source._repeatedNestedMessage
_repeatedForeignMessage = source._repeatedForeignMessage
_repeatedImportMessage = source._repeatedImportMessage
_repeatedNestedEnum = source._repeatedNestedEnum
_repeatedForeignEnum = source._repeatedForeignEnum
_repeatedStringPiece = source._repeatedStringPiece
_repeatedCord = source._repeatedCord
_repeatedLazyMessage = source._repeatedLazyMessage
_oneofField = source._oneofField
}
}
/// Returns storage that is safe to mutate: when the backing _StorageClass
/// is shared with another message copy, it is cloned first (copy-on-write).
fileprivate mutating func _uniqueStorage() -> _StorageClass {
if !isKnownUniquelyReferenced(&_storage) {
_storage = _StorageClass(copying: _storage)
}
return _storage
}
/// Decodes fields from `decoder` into this message's storage.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
// Make sure we own the storage before mutating it in place.
_ = _uniqueStorage()
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
while let fieldNumber = try decoder.nextFieldNumber() {
switch fieldNumber {
case 1: try decoder.decodeSingularInt32Field(value: &_storage._optionalInt32)
case 2: try decoder.decodeSingularInt64Field(value: &_storage._optionalInt64)
case 3: try decoder.decodeSingularUInt32Field(value: &_storage._optionalUint32)
case 4: try decoder.decodeSingularUInt64Field(value: &_storage._optionalUint64)
case 5: try decoder.decodeSingularSInt32Field(value: &_storage._optionalSint32)
case 6: try decoder.decodeSingularSInt64Field(value: &_storage._optionalSint64)
case 7: try decoder.decodeSingularFixed32Field(value: &_storage._optionalFixed32)
case 8: try decoder.decodeSingularFixed64Field(value: &_storage._optionalFixed64)
case 9: try decoder.decodeSingularSFixed32Field(value: &_storage._optionalSfixed32)
case 10: try decoder.decodeSingularSFixed64Field(value: &_storage._optionalSfixed64)
case 11: try decoder.decodeSingularFloatField(value: &_storage._optionalFloat)
case 12: try decoder.decodeSingularDoubleField(value: &_storage._optionalDouble)
case 13: try decoder.decodeSingularBoolField(value: &_storage._optionalBool)
case 14: try decoder.decodeSingularStringField(value: &_storage._optionalString)
case 15: try decoder.decodeSingularBytesField(value: &_storage._optionalBytes)
case 18: try decoder.decodeSingularMessageField(value: &_storage._optionalNestedMessage)
case 19: try decoder.decodeSingularMessageField(value: &_storage._optionalForeignMessage)
case 20: try decoder.decodeSingularMessageField(value: &_storage._optionalImportMessage)
case 21: try decoder.decodeSingularEnumField(value: &_storage._optionalNestedEnum)
case 22: try decoder.decodeSingularEnumField(value: &_storage._optionalForeignEnum)
case 24: try decoder.decodeSingularStringField(value: &_storage._optionalStringPiece)
case 25: try decoder.decodeSingularStringField(value: &_storage._optionalCord)
case 26: try decoder.decodeSingularMessageField(value: &_storage._optionalPublicImportMessage)
case 27: try decoder.decodeSingularMessageField(value: &_storage._optionalLazyMessage)
case 31: try decoder.decodeRepeatedInt32Field(value: &_storage._repeatedInt32)
case 32: try decoder.decodeRepeatedInt64Field(value: &_storage._repeatedInt64)
case 33: try decoder.decodeRepeatedUInt32Field(value: &_storage._repeatedUint32)
case 34: try decoder.decodeRepeatedUInt64Field(value: &_storage._repeatedUint64)
case 35: try decoder.decodeRepeatedSInt32Field(value: &_storage._repeatedSint32)
case 36: try decoder.decodeRepeatedSInt64Field(value: &_storage._repeatedSint64)
case 37: try decoder.decodeRepeatedFixed32Field(value: &_storage._repeatedFixed32)
case 38: try decoder.decodeRepeatedFixed64Field(value: &_storage._repeatedFixed64)
case 39: try decoder.decodeRepeatedSFixed32Field(value: &_storage._repeatedSfixed32)
case 40: try decoder.decodeRepeatedSFixed64Field(value: &_storage._repeatedSfixed64)
case 41: try decoder.decodeRepeatedFloatField(value: &_storage._repeatedFloat)
case 42: try decoder.decodeRepeatedDoubleField(value: &_storage._repeatedDouble)
case 43: try decoder.decodeRepeatedBoolField(value: &_storage._repeatedBool)
case 44: try decoder.decodeRepeatedStringField(value: &_storage._repeatedString)
case 45: try decoder.decodeRepeatedBytesField(value: &_storage._repeatedBytes)
case 48: try decoder.decodeRepeatedMessageField(value: &_storage._repeatedNestedMessage)
case 49: try decoder.decodeRepeatedMessageField(value: &_storage._repeatedForeignMessage)
case 50: try decoder.decodeRepeatedMessageField(value: &_storage._repeatedImportMessage)
case 51: try decoder.decodeRepeatedEnumField(value: &_storage._repeatedNestedEnum)
case 52: try decoder.decodeRepeatedEnumField(value: &_storage._repeatedForeignEnum)
case 54: try decoder.decodeRepeatedStringField(value: &_storage._repeatedStringPiece)
case 55: try decoder.decodeRepeatedStringField(value: &_storage._repeatedCord)
case 57: try decoder.decodeRepeatedMessageField(value: &_storage._repeatedLazyMessage)
// Fields 111-114 form the oneof_field group: decoding a member while a
// member is already set reports a conflict via handleConflictingOneOf().
case 111:
if _storage._oneofField != nil {try decoder.handleConflictingOneOf()}
var v: UInt32?
try decoder.decodeSingularUInt32Field(value: &v)
if let v = v {_storage._oneofField = .oneofUint32(v)}
case 112:
var v: Proto3ArenaUnittest_TestAllTypes.NestedMessage?
if let current = _storage._oneofField {
try decoder.handleConflictingOneOf()
// Reuse the existing nested message so a repeated occurrence on the
// wire merges into it rather than replacing it.
if case .oneofNestedMessage(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {_storage._oneofField = .oneofNestedMessage(v)}
case 113:
if _storage._oneofField != nil {try decoder.handleConflictingOneOf()}
var v: String?
try decoder.decodeSingularStringField(value: &v)
if let v = v {_storage._oneofField = .oneofString(v)}
case 114:
if _storage._oneofField != nil {try decoder.handleConflictingOneOf()}
var v: Data?
try decoder.decodeSingularBytesField(value: &v)
if let v = v {_storage._oneofField = .oneofBytes(v)}
// NOTE(review): 115 is handled after the oneof cases; switch-case order
// has no behavioral effect here.
case 115: try decoder.decodeSingularMessageField(value: &_storage._optionalLazyImportMessage)
default: break
}
}
}
}
/// Emits every set (non-default) field of `TestAllTypes` to `visitor`.
/// NOTE(review): generated by protoc-gen-swift; the statement order below
/// defines the serialized field order (ascending field number) and must not
/// be rearranged. Edits here will be lost on regeneration.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
  // Keep the reference-typed backing storage alive while its fields are read.
  try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
    // -- Singular scalar/string/bytes fields (1-15): proto3 semantics, so each
    //    is emitted only when it differs from its zero/empty default.
    if _storage._optionalInt32 != 0 {
      try visitor.visitSingularInt32Field(value: _storage._optionalInt32, fieldNumber: 1)
    }
    if _storage._optionalInt64 != 0 {
      try visitor.visitSingularInt64Field(value: _storage._optionalInt64, fieldNumber: 2)
    }
    if _storage._optionalUint32 != 0 {
      try visitor.visitSingularUInt32Field(value: _storage._optionalUint32, fieldNumber: 3)
    }
    if _storage._optionalUint64 != 0 {
      try visitor.visitSingularUInt64Field(value: _storage._optionalUint64, fieldNumber: 4)
    }
    if _storage._optionalSint32 != 0 {
      try visitor.visitSingularSInt32Field(value: _storage._optionalSint32, fieldNumber: 5)
    }
    if _storage._optionalSint64 != 0 {
      try visitor.visitSingularSInt64Field(value: _storage._optionalSint64, fieldNumber: 6)
    }
    if _storage._optionalFixed32 != 0 {
      try visitor.visitSingularFixed32Field(value: _storage._optionalFixed32, fieldNumber: 7)
    }
    if _storage._optionalFixed64 != 0 {
      try visitor.visitSingularFixed64Field(value: _storage._optionalFixed64, fieldNumber: 8)
    }
    if _storage._optionalSfixed32 != 0 {
      try visitor.visitSingularSFixed32Field(value: _storage._optionalSfixed32, fieldNumber: 9)
    }
    if _storage._optionalSfixed64 != 0 {
      try visitor.visitSingularSFixed64Field(value: _storage._optionalSfixed64, fieldNumber: 10)
    }
    if _storage._optionalFloat != 0 {
      try visitor.visitSingularFloatField(value: _storage._optionalFloat, fieldNumber: 11)
    }
    if _storage._optionalDouble != 0 {
      try visitor.visitSingularDoubleField(value: _storage._optionalDouble, fieldNumber: 12)
    }
    if _storage._optionalBool != false {
      try visitor.visitSingularBoolField(value: _storage._optionalBool, fieldNumber: 13)
    }
    if !_storage._optionalString.isEmpty {
      try visitor.visitSingularStringField(value: _storage._optionalString, fieldNumber: 14)
    }
    if !_storage._optionalBytes.isEmpty {
      try visitor.visitSingularBytesField(value: _storage._optionalBytes, fieldNumber: 15)
    }
    // -- Singular message fields: emitted whenever present (non-nil).
    if let v = _storage._optionalNestedMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 18)
    }
    if let v = _storage._optionalForeignMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 19)
    }
    if let v = _storage._optionalImportMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 20)
    }
    // -- Singular enums: emitted only when not the zero case.
    if _storage._optionalNestedEnum != .zero {
      try visitor.visitSingularEnumField(value: _storage._optionalNestedEnum, fieldNumber: 21)
    }
    if _storage._optionalForeignEnum != .foreignZero {
      try visitor.visitSingularEnumField(value: _storage._optionalForeignEnum, fieldNumber: 22)
    }
    // string_piece / cord fields are represented as plain Swift strings.
    if !_storage._optionalStringPiece.isEmpty {
      try visitor.visitSingularStringField(value: _storage._optionalStringPiece, fieldNumber: 24)
    }
    if !_storage._optionalCord.isEmpty {
      try visitor.visitSingularStringField(value: _storage._optionalCord, fieldNumber: 25)
    }
    if let v = _storage._optionalPublicImportMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 26)
    }
    if let v = _storage._optionalLazyMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 27)
    }
    // -- Repeated numeric/bool fields (31-43): packed encoding (proto3 default).
    if !_storage._repeatedInt32.isEmpty {
      try visitor.visitPackedInt32Field(value: _storage._repeatedInt32, fieldNumber: 31)
    }
    if !_storage._repeatedInt64.isEmpty {
      try visitor.visitPackedInt64Field(value: _storage._repeatedInt64, fieldNumber: 32)
    }
    if !_storage._repeatedUint32.isEmpty {
      try visitor.visitPackedUInt32Field(value: _storage._repeatedUint32, fieldNumber: 33)
    }
    if !_storage._repeatedUint64.isEmpty {
      try visitor.visitPackedUInt64Field(value: _storage._repeatedUint64, fieldNumber: 34)
    }
    if !_storage._repeatedSint32.isEmpty {
      try visitor.visitPackedSInt32Field(value: _storage._repeatedSint32, fieldNumber: 35)
    }
    if !_storage._repeatedSint64.isEmpty {
      try visitor.visitPackedSInt64Field(value: _storage._repeatedSint64, fieldNumber: 36)
    }
    if !_storage._repeatedFixed32.isEmpty {
      try visitor.visitPackedFixed32Field(value: _storage._repeatedFixed32, fieldNumber: 37)
    }
    if !_storage._repeatedFixed64.isEmpty {
      try visitor.visitPackedFixed64Field(value: _storage._repeatedFixed64, fieldNumber: 38)
    }
    if !_storage._repeatedSfixed32.isEmpty {
      try visitor.visitPackedSFixed32Field(value: _storage._repeatedSfixed32, fieldNumber: 39)
    }
    if !_storage._repeatedSfixed64.isEmpty {
      try visitor.visitPackedSFixed64Field(value: _storage._repeatedSfixed64, fieldNumber: 40)
    }
    if !_storage._repeatedFloat.isEmpty {
      try visitor.visitPackedFloatField(value: _storage._repeatedFloat, fieldNumber: 41)
    }
    if !_storage._repeatedDouble.isEmpty {
      try visitor.visitPackedDoubleField(value: _storage._repeatedDouble, fieldNumber: 42)
    }
    if !_storage._repeatedBool.isEmpty {
      try visitor.visitPackedBoolField(value: _storage._repeatedBool, fieldNumber: 43)
    }
    // -- Repeated strings/bytes/messages cannot be packed on the wire.
    if !_storage._repeatedString.isEmpty {
      try visitor.visitRepeatedStringField(value: _storage._repeatedString, fieldNumber: 44)
    }
    if !_storage._repeatedBytes.isEmpty {
      try visitor.visitRepeatedBytesField(value: _storage._repeatedBytes, fieldNumber: 45)
    }
    if !_storage._repeatedNestedMessage.isEmpty {
      try visitor.visitRepeatedMessageField(value: _storage._repeatedNestedMessage, fieldNumber: 48)
    }
    if !_storage._repeatedForeignMessage.isEmpty {
      try visitor.visitRepeatedMessageField(value: _storage._repeatedForeignMessage, fieldNumber: 49)
    }
    if !_storage._repeatedImportMessage.isEmpty {
      try visitor.visitRepeatedMessageField(value: _storage._repeatedImportMessage, fieldNumber: 50)
    }
    // Repeated enums are packed like the numeric fields above.
    if !_storage._repeatedNestedEnum.isEmpty {
      try visitor.visitPackedEnumField(value: _storage._repeatedNestedEnum, fieldNumber: 51)
    }
    if !_storage._repeatedForeignEnum.isEmpty {
      try visitor.visitPackedEnumField(value: _storage._repeatedForeignEnum, fieldNumber: 52)
    }
    if !_storage._repeatedStringPiece.isEmpty {
      try visitor.visitRepeatedStringField(value: _storage._repeatedStringPiece, fieldNumber: 54)
    }
    if !_storage._repeatedCord.isEmpty {
      try visitor.visitRepeatedStringField(value: _storage._repeatedCord, fieldNumber: 55)
    }
    if !_storage._repeatedLazyMessage.isEmpty {
      try visitor.visitRepeatedMessageField(value: _storage._repeatedLazyMessage, fieldNumber: 57)
    }
    // -- At most one case of the oneof_field group (111-114) is set.
    switch _storage._oneofField {
    case .oneofUint32(let v)?:
      try visitor.visitSingularUInt32Field(value: v, fieldNumber: 111)
    case .oneofNestedMessage(let v)?:
      try visitor.visitSingularMessageField(value: v, fieldNumber: 112)
    case .oneofString(let v)?:
      try visitor.visitSingularStringField(value: v, fieldNumber: 113)
    case .oneofBytes(let v)?:
      try visitor.visitSingularBytesField(value: v, fieldNumber: 114)
    case nil: break
    }
    if let v = _storage._optionalLazyImportMessage {
      try visitor.visitSingularMessageField(value: v, fieldNumber: 115)
    }
  }
  // Unknown fields captured during decoding are re-emitted last.
  try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality for `TestAllTypes`, including preserved unknown
/// fields. Generated code: the fast path skips the comparison entirely when
/// both values share the same storage object (reference identity).
static func ==(lhs: Proto3ArenaUnittest_TestAllTypes, rhs: Proto3ArenaUnittest_TestAllTypes) -> Bool {
  if lhs._storage !== rhs._storage {
    // Keep both storage objects alive while their fields are compared.
    let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
      let _storage = _args.0
      let rhs_storage = _args.1
      if _storage._optionalInt32 != rhs_storage._optionalInt32 {return false}
      if _storage._optionalInt64 != rhs_storage._optionalInt64 {return false}
      if _storage._optionalUint32 != rhs_storage._optionalUint32 {return false}
      if _storage._optionalUint64 != rhs_storage._optionalUint64 {return false}
      if _storage._optionalSint32 != rhs_storage._optionalSint32 {return false}
      if _storage._optionalSint64 != rhs_storage._optionalSint64 {return false}
      if _storage._optionalFixed32 != rhs_storage._optionalFixed32 {return false}
      if _storage._optionalFixed64 != rhs_storage._optionalFixed64 {return false}
      if _storage._optionalSfixed32 != rhs_storage._optionalSfixed32 {return false}
      if _storage._optionalSfixed64 != rhs_storage._optionalSfixed64 {return false}
      if _storage._optionalFloat != rhs_storage._optionalFloat {return false}
      if _storage._optionalDouble != rhs_storage._optionalDouble {return false}
      if _storage._optionalBool != rhs_storage._optionalBool {return false}
      if _storage._optionalString != rhs_storage._optionalString {return false}
      if _storage._optionalBytes != rhs_storage._optionalBytes {return false}
      if _storage._optionalNestedMessage != rhs_storage._optionalNestedMessage {return false}
      if _storage._optionalForeignMessage != rhs_storage._optionalForeignMessage {return false}
      if _storage._optionalImportMessage != rhs_storage._optionalImportMessage {return false}
      if _storage._optionalNestedEnum != rhs_storage._optionalNestedEnum {return false}
      if _storage._optionalForeignEnum != rhs_storage._optionalForeignEnum {return false}
      if _storage._optionalStringPiece != rhs_storage._optionalStringPiece {return false}
      if _storage._optionalCord != rhs_storage._optionalCord {return false}
      if _storage._optionalPublicImportMessage != rhs_storage._optionalPublicImportMessage {return false}
      if _storage._optionalLazyMessage != rhs_storage._optionalLazyMessage {return false}
      if _storage._optionalLazyImportMessage != rhs_storage._optionalLazyImportMessage {return false}
      if _storage._repeatedInt32 != rhs_storage._repeatedInt32 {return false}
      if _storage._repeatedInt64 != rhs_storage._repeatedInt64 {return false}
      if _storage._repeatedUint32 != rhs_storage._repeatedUint32 {return false}
      if _storage._repeatedUint64 != rhs_storage._repeatedUint64 {return false}
      if _storage._repeatedSint32 != rhs_storage._repeatedSint32 {return false}
      if _storage._repeatedSint64 != rhs_storage._repeatedSint64 {return false}
      if _storage._repeatedFixed32 != rhs_storage._repeatedFixed32 {return false}
      if _storage._repeatedFixed64 != rhs_storage._repeatedFixed64 {return false}
      if _storage._repeatedSfixed32 != rhs_storage._repeatedSfixed32 {return false}
      if _storage._repeatedSfixed64 != rhs_storage._repeatedSfixed64 {return false}
      if _storage._repeatedFloat != rhs_storage._repeatedFloat {return false}
      if _storage._repeatedDouble != rhs_storage._repeatedDouble {return false}
      if _storage._repeatedBool != rhs_storage._repeatedBool {return false}
      if _storage._repeatedString != rhs_storage._repeatedString {return false}
      if _storage._repeatedBytes != rhs_storage._repeatedBytes {return false}
      if _storage._repeatedNestedMessage != rhs_storage._repeatedNestedMessage {return false}
      if _storage._repeatedForeignMessage != rhs_storage._repeatedForeignMessage {return false}
      if _storage._repeatedImportMessage != rhs_storage._repeatedImportMessage {return false}
      if _storage._repeatedNestedEnum != rhs_storage._repeatedNestedEnum {return false}
      if _storage._repeatedForeignEnum != rhs_storage._repeatedForeignEnum {return false}
      if _storage._repeatedStringPiece != rhs_storage._repeatedStringPiece {return false}
      if _storage._repeatedCord != rhs_storage._repeatedCord {return false}
      if _storage._repeatedLazyMessage != rhs_storage._repeatedLazyMessage {return false}
      if _storage._oneofField != rhs_storage._oneofField {return false}
      return true
    }
    if !storagesAreEqual {return false}
  }
  // Unknown fields participate in equality as well.
  if lhs.unknownFields != rhs.unknownFields {return false}
  return true
}
}
/// Text-format/JSON name mapping for `TestAllTypes.NestedEnum`.
extension Proto3ArenaUnittest_TestAllTypes.NestedEnum: SwiftProtobuf._ProtoNameProviding {
  // Raw enum value -> proto name (note the negative-valued case NEG = -1).
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    -1: .same(proto: "NEG"),
    0: .same(proto: "ZERO"),
    1: .same(proto: "FOO"),
    2: .same(proto: "BAR"),
    3: .same(proto: "BAZ"),
  ]
}
/// SwiftProtobuf runtime conformance for `TestAllTypes.NestedMessage`
/// (a single int32 field `bb`, field number 1).
extension Proto3ArenaUnittest_TestAllTypes.NestedMessage: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  // Fully-qualified proto name, nested under the parent message's name.
  static let protoMessageName: String = Proto3ArenaUnittest_TestAllTypes.protoMessageName + ".NestedMessage"
  // Field-number -> field-name table used by text/JSON serialization.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "bb"),
  ]
  /// Decodes known fields from `decoder`; unrecognized field numbers fall
  /// through to `default` and are preserved by the decoder as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      switch fieldNumber {
      case 1: try decoder.decodeSingularInt32Field(value: &self.bb)
      default: break
      }
    }
  }
  /// Emits `bb` when non-default, then any preserved unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if self.bb != 0 {
      try visitor.visitSingularInt32Field(value: self.bb, fieldNumber: 1)
    }
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Proto3ArenaUnittest_TestAllTypes.NestedMessage, rhs: Proto3ArenaUnittest_TestAllTypes.NestedMessage) -> Bool {
    if lhs.bb != rhs.bb {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// SwiftProtobuf runtime conformance for `TestPackedTypes`: every field is a
/// repeated scalar/enum serialized with packed encoding (field numbers 90-103).
/// Generated code; visit order below defines the wire field order.
extension Proto3ArenaUnittest_TestPackedTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".TestPackedTypes"
  // Field-number -> field-name table used by text/JSON serialization.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    90: .standard(proto: "packed_int32"),
    91: .standard(proto: "packed_int64"),
    92: .standard(proto: "packed_uint32"),
    93: .standard(proto: "packed_uint64"),
    94: .standard(proto: "packed_sint32"),
    95: .standard(proto: "packed_sint64"),
    96: .standard(proto: "packed_fixed32"),
    97: .standard(proto: "packed_fixed64"),
    98: .standard(proto: "packed_sfixed32"),
    99: .standard(proto: "packed_sfixed64"),
    100: .standard(proto: "packed_float"),
    101: .standard(proto: "packed_double"),
    102: .standard(proto: "packed_bool"),
    103: .standard(proto: "packed_enum"),
  ]
  /// Decodes fields; the `decodeRepeated*` helpers accept both packed and
  /// expanded wire encodings.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      switch fieldNumber {
      case 90: try decoder.decodeRepeatedInt32Field(value: &self.packedInt32)
      case 91: try decoder.decodeRepeatedInt64Field(value: &self.packedInt64)
      case 92: try decoder.decodeRepeatedUInt32Field(value: &self.packedUint32)
      case 93: try decoder.decodeRepeatedUInt64Field(value: &self.packedUint64)
      case 94: try decoder.decodeRepeatedSInt32Field(value: &self.packedSint32)
      case 95: try decoder.decodeRepeatedSInt64Field(value: &self.packedSint64)
      case 96: try decoder.decodeRepeatedFixed32Field(value: &self.packedFixed32)
      case 97: try decoder.decodeRepeatedFixed64Field(value: &self.packedFixed64)
      case 98: try decoder.decodeRepeatedSFixed32Field(value: &self.packedSfixed32)
      case 99: try decoder.decodeRepeatedSFixed64Field(value: &self.packedSfixed64)
      case 100: try decoder.decodeRepeatedFloatField(value: &self.packedFloat)
      case 101: try decoder.decodeRepeatedDoubleField(value: &self.packedDouble)
      case 102: try decoder.decodeRepeatedBoolField(value: &self.packedBool)
      case 103: try decoder.decodeRepeatedEnumField(value: &self.packedEnum)
      default: break
      }
    }
  }
  /// Emits each non-empty repeated field with packed encoding, in
  /// field-number order, then any preserved unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.packedInt32.isEmpty {
      try visitor.visitPackedInt32Field(value: self.packedInt32, fieldNumber: 90)
    }
    if !self.packedInt64.isEmpty {
      try visitor.visitPackedInt64Field(value: self.packedInt64, fieldNumber: 91)
    }
    if !self.packedUint32.isEmpty {
      try visitor.visitPackedUInt32Field(value: self.packedUint32, fieldNumber: 92)
    }
    if !self.packedUint64.isEmpty {
      try visitor.visitPackedUInt64Field(value: self.packedUint64, fieldNumber: 93)
    }
    if !self.packedSint32.isEmpty {
      try visitor.visitPackedSInt32Field(value: self.packedSint32, fieldNumber: 94)
    }
    if !self.packedSint64.isEmpty {
      try visitor.visitPackedSInt64Field(value: self.packedSint64, fieldNumber: 95)
    }
    if !self.packedFixed32.isEmpty {
      try visitor.visitPackedFixed32Field(value: self.packedFixed32, fieldNumber: 96)
    }
    if !self.packedFixed64.isEmpty {
      try visitor.visitPackedFixed64Field(value: self.packedFixed64, fieldNumber: 97)
    }
    if !self.packedSfixed32.isEmpty {
      try visitor.visitPackedSFixed32Field(value: self.packedSfixed32, fieldNumber: 98)
    }
    if !self.packedSfixed64.isEmpty {
      try visitor.visitPackedSFixed64Field(value: self.packedSfixed64, fieldNumber: 99)
    }
    if !self.packedFloat.isEmpty {
      try visitor.visitPackedFloatField(value: self.packedFloat, fieldNumber: 100)
    }
    if !self.packedDouble.isEmpty {
      try visitor.visitPackedDoubleField(value: self.packedDouble, fieldNumber: 101)
    }
    if !self.packedBool.isEmpty {
      try visitor.visitPackedBoolField(value: self.packedBool, fieldNumber: 102)
    }
    if !self.packedEnum.isEmpty {
      try visitor.visitPackedEnumField(value: self.packedEnum, fieldNumber: 103)
    }
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Proto3ArenaUnittest_TestPackedTypes, rhs: Proto3ArenaUnittest_TestPackedTypes) -> Bool {
    if lhs.packedInt32 != rhs.packedInt32 {return false}
    if lhs.packedInt64 != rhs.packedInt64 {return false}
    if lhs.packedUint32 != rhs.packedUint32 {return false}
    if lhs.packedUint64 != rhs.packedUint64 {return false}
    if lhs.packedSint32 != rhs.packedSint32 {return false}
    if lhs.packedSint64 != rhs.packedSint64 {return false}
    if lhs.packedFixed32 != rhs.packedFixed32 {return false}
    if lhs.packedFixed64 != rhs.packedFixed64 {return false}
    if lhs.packedSfixed32 != rhs.packedSfixed32 {return false}
    if lhs.packedSfixed64 != rhs.packedSfixed64 {return false}
    if lhs.packedFloat != rhs.packedFloat {return false}
    if lhs.packedDouble != rhs.packedDouble {return false}
    if lhs.packedBool != rhs.packedBool {return false}
    if lhs.packedEnum != rhs.packedEnum {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// SwiftProtobuf runtime conformance for `TestUnpackedTypes`: the same
/// repeated scalar/enum shapes as `TestPackedTypes`, but serialized with the
/// expanded (non-packed) encoding — note `visitRepeated*` rather than
/// `visitPacked*` in `traverse`. Generated code.
extension Proto3ArenaUnittest_TestUnpackedTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".TestUnpackedTypes"
  // Field-number -> field-name table used by text/JSON serialization.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .standard(proto: "repeated_int32"),
    2: .standard(proto: "repeated_int64"),
    3: .standard(proto: "repeated_uint32"),
    4: .standard(proto: "repeated_uint64"),
    5: .standard(proto: "repeated_sint32"),
    6: .standard(proto: "repeated_sint64"),
    7: .standard(proto: "repeated_fixed32"),
    8: .standard(proto: "repeated_fixed64"),
    9: .standard(proto: "repeated_sfixed32"),
    10: .standard(proto: "repeated_sfixed64"),
    11: .standard(proto: "repeated_float"),
    12: .standard(proto: "repeated_double"),
    13: .standard(proto: "repeated_bool"),
    14: .standard(proto: "repeated_nested_enum"),
  ]
  /// Decodes fields; the `decodeRepeated*` helpers accept both packed and
  /// expanded wire encodings.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      switch fieldNumber {
      case 1: try decoder.decodeRepeatedInt32Field(value: &self.repeatedInt32)
      case 2: try decoder.decodeRepeatedInt64Field(value: &self.repeatedInt64)
      case 3: try decoder.decodeRepeatedUInt32Field(value: &self.repeatedUint32)
      case 4: try decoder.decodeRepeatedUInt64Field(value: &self.repeatedUint64)
      case 5: try decoder.decodeRepeatedSInt32Field(value: &self.repeatedSint32)
      case 6: try decoder.decodeRepeatedSInt64Field(value: &self.repeatedSint64)
      case 7: try decoder.decodeRepeatedFixed32Field(value: &self.repeatedFixed32)
      case 8: try decoder.decodeRepeatedFixed64Field(value: &self.repeatedFixed64)
      case 9: try decoder.decodeRepeatedSFixed32Field(value: &self.repeatedSfixed32)
      case 10: try decoder.decodeRepeatedSFixed64Field(value: &self.repeatedSfixed64)
      case 11: try decoder.decodeRepeatedFloatField(value: &self.repeatedFloat)
      case 12: try decoder.decodeRepeatedDoubleField(value: &self.repeatedDouble)
      case 13: try decoder.decodeRepeatedBoolField(value: &self.repeatedBool)
      case 14: try decoder.decodeRepeatedEnumField(value: &self.repeatedNestedEnum)
      default: break
      }
    }
  }
  /// Emits each non-empty repeated field with expanded encoding, in
  /// field-number order, then any preserved unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.repeatedInt32.isEmpty {
      try visitor.visitRepeatedInt32Field(value: self.repeatedInt32, fieldNumber: 1)
    }
    if !self.repeatedInt64.isEmpty {
      try visitor.visitRepeatedInt64Field(value: self.repeatedInt64, fieldNumber: 2)
    }
    if !self.repeatedUint32.isEmpty {
      try visitor.visitRepeatedUInt32Field(value: self.repeatedUint32, fieldNumber: 3)
    }
    if !self.repeatedUint64.isEmpty {
      try visitor.visitRepeatedUInt64Field(value: self.repeatedUint64, fieldNumber: 4)
    }
    if !self.repeatedSint32.isEmpty {
      try visitor.visitRepeatedSInt32Field(value: self.repeatedSint32, fieldNumber: 5)
    }
    if !self.repeatedSint64.isEmpty {
      try visitor.visitRepeatedSInt64Field(value: self.repeatedSint64, fieldNumber: 6)
    }
    if !self.repeatedFixed32.isEmpty {
      try visitor.visitRepeatedFixed32Field(value: self.repeatedFixed32, fieldNumber: 7)
    }
    if !self.repeatedFixed64.isEmpty {
      try visitor.visitRepeatedFixed64Field(value: self.repeatedFixed64, fieldNumber: 8)
    }
    if !self.repeatedSfixed32.isEmpty {
      try visitor.visitRepeatedSFixed32Field(value: self.repeatedSfixed32, fieldNumber: 9)
    }
    if !self.repeatedSfixed64.isEmpty {
      try visitor.visitRepeatedSFixed64Field(value: self.repeatedSfixed64, fieldNumber: 10)
    }
    if !self.repeatedFloat.isEmpty {
      try visitor.visitRepeatedFloatField(value: self.repeatedFloat, fieldNumber: 11)
    }
    if !self.repeatedDouble.isEmpty {
      try visitor.visitRepeatedDoubleField(value: self.repeatedDouble, fieldNumber: 12)
    }
    if !self.repeatedBool.isEmpty {
      try visitor.visitRepeatedBoolField(value: self.repeatedBool, fieldNumber: 13)
    }
    if !self.repeatedNestedEnum.isEmpty {
      try visitor.visitRepeatedEnumField(value: self.repeatedNestedEnum, fieldNumber: 14)
    }
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Proto3ArenaUnittest_TestUnpackedTypes, rhs: Proto3ArenaUnittest_TestUnpackedTypes) -> Bool {
    if lhs.repeatedInt32 != rhs.repeatedInt32 {return false}
    if lhs.repeatedInt64 != rhs.repeatedInt64 {return false}
    if lhs.repeatedUint32 != rhs.repeatedUint32 {return false}
    if lhs.repeatedUint64 != rhs.repeatedUint64 {return false}
    if lhs.repeatedSint32 != rhs.repeatedSint32 {return false}
    if lhs.repeatedSint64 != rhs.repeatedSint64 {return false}
    if lhs.repeatedFixed32 != rhs.repeatedFixed32 {return false}
    if lhs.repeatedFixed64 != rhs.repeatedFixed64 {return false}
    if lhs.repeatedSfixed32 != rhs.repeatedSfixed32 {return false}
    if lhs.repeatedSfixed64 != rhs.repeatedSfixed64 {return false}
    if lhs.repeatedFloat != rhs.repeatedFloat {return false}
    if lhs.repeatedDouble != rhs.repeatedDouble {return false}
    if lhs.repeatedBool != rhs.repeatedBool {return false}
    if lhs.repeatedNestedEnum != rhs.repeatedNestedEnum {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// SwiftProtobuf runtime conformance for `NestedTestAllTypes`. This message is
/// recursive (it contains itself), so its fields live in a heap-allocated
/// `_StorageClass` with copy-on-write semantics via `_uniqueStorage()`.
extension Proto3ArenaUnittest_NestedTestAllTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".NestedTestAllTypes"
  // Field-number -> field-name table used by text/JSON serialization.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "child"),
    2: .same(proto: "payload"),
    3: .standard(proto: "repeated_child"),
  ]
  // Reference-typed backing store; enables recursion and cheap struct copies.
  fileprivate class _StorageClass {
    var _child: Proto3ArenaUnittest_NestedTestAllTypes? = nil
    var _payload: Proto3ArenaUnittest_TestAllTypes? = nil
    var _repeatedChild: [Proto3ArenaUnittest_NestedTestAllTypes] = []
    // Shared storage for default-initialized messages; never mutated.
    static let defaultInstance = _StorageClass()
    private init() {}
    // Deep-copies the field values for copy-on-write.
    init(copying source: _StorageClass) {
      _child = source._child
      _payload = source._payload
      _repeatedChild = source._repeatedChild
    }
  }
  /// Returns storage that is uniquely owned by this value, copying it first
  /// if it is currently shared (copy-on-write).
  fileprivate mutating func _uniqueStorage() -> _StorageClass {
    if !isKnownUniquelyReferenced(&_storage) {
      _storage = _StorageClass(copying: _storage)
    }
    return _storage
  }
  /// Decodes fields into uniquely-owned storage; unknown field numbers are
  /// preserved by the decoder.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    // Ensure we do not mutate storage shared with another copy.
    _ = _uniqueStorage()
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      while let fieldNumber = try decoder.nextFieldNumber() {
        switch fieldNumber {
        case 1: try decoder.decodeSingularMessageField(value: &_storage._child)
        case 2: try decoder.decodeSingularMessageField(value: &_storage._payload)
        case 3: try decoder.decodeRepeatedMessageField(value: &_storage._repeatedChild)
        default: break
        }
      }
    }
  }
  /// Emits set fields in field-number order, then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      if let v = _storage._child {
        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
      }
      if let v = _storage._payload {
        try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
      }
      if !_storage._repeatedChild.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._repeatedChild, fieldNumber: 3)
      }
    }
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Equality with a reference-identity fast path on the shared storage.
  static func ==(lhs: Proto3ArenaUnittest_NestedTestAllTypes, rhs: Proto3ArenaUnittest_NestedTestAllTypes) -> Bool {
    if lhs._storage !== rhs._storage {
      let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
        let _storage = _args.0
        let rhs_storage = _args.1
        if _storage._child != rhs_storage._child {return false}
        if _storage._payload != rhs_storage._payload {return false}
        if _storage._repeatedChild != rhs_storage._repeatedChild {return false}
        return true
      }
      if !storagesAreEqual {return false}
    }
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// SwiftProtobuf runtime conformance for `ForeignMessage`
/// (a single int32 field `c`, field number 1).
extension Proto3ArenaUnittest_ForeignMessage: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".ForeignMessage"
  // Field-number -> field-name table used by text/JSON serialization.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "c"),
  ]
  /// Decodes known fields; unrecognized numbers are preserved as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      switch fieldNumber {
      case 1: try decoder.decodeSingularInt32Field(value: &self.c)
      default: break
      }
    }
  }
  /// Emits `c` when non-default, then any preserved unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if self.c != 0 {
      try visitor.visitSingularInt32Field(value: self.c, fieldNumber: 1)
    }
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Proto3ArenaUnittest_ForeignMessage, rhs: Proto3ArenaUnittest_ForeignMessage) -> Bool {
    if lhs.c != rhs.c {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// SwiftProtobuf runtime conformance for `TestEmptyMessage`, which declares
/// no fields: everything received ends up in `unknownFields`.
extension Proto3ArenaUnittest_TestEmptyMessage: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".TestEmptyMessage"
  // No declared fields, so the name map is empty.
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()
  /// Drains the field stream; the decoder preserves every field as unknown.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let _ = try decoder.nextFieldNumber() {
    }
  }
  /// Only the preserved unknown fields are re-emitted.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }
  /// Equality reduces to comparing unknown fields.
  static func ==(lhs: Proto3ArenaUnittest_TestEmptyMessage, rhs: Proto3ArenaUnittest_TestEmptyMessage) -> Bool {
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# WINDOWS PACKAGING SCRIPT FOR NAEV
# Requires NSIS, and python3-pip to be installed
#
# This script should be run after compiling Naev
# It detects the current environment, and builds the appropriate NSIS installer
# into the root naev directory.
#

# Checks if argument(s) are valid
if [[ $1 == "--nightly" ]]; then
   echo "Building for nightly release"
   NIGHTLY=true
   # Get Formatted Date
   BUILD_DATE="$(date +%m_%d_%Y)"
elif [[ $1 == "" ]]; then
   echo "No arguments passed, assuming normal release"
   NIGHTLY=false
else
   # Any other argument is invalid.
   echo "Please use argument --nightly if you are building this as a nightly build"
   exit 1
fi

# Check if we are running in the right place
if [[ ! -f "naev.6" ]]; then
   echo "Please run from Naev root directory."
   exit 1
fi

# Rudimentary way of detecting which environment we are packaging..
# It works, and it should remain working until msys changes their naming scheme
if [[ $PATH == *"mingw32"* ]]; then
   echo "Detected MinGW32 environment"
   ARCH="32"
elif [[ $PATH == *"mingw64"* ]]; then
   echo "Detected MinGW64 environment"
   ARCH="64"
else
   echo "Welp, I don't know what environment this is... Make sure you are running this in an MSYS2 MinGW environment"
   exit 1
fi

# BUGFIX(review): the original script only printed "could not find VERSION
# file" when the version string contained no "-", which made every stable
# (non-beta) release abort. Check for the file explicitly instead, and let a
# hyphen-free version simply mean "not a beta".
if [[ ! -f "VERSION" ]]; then
   echo "could not find VERSION file"
   exit 1
fi
VERSION="$(cat VERSION)"
BETA=false

# Get version, negative minors mean betas (e.g. "0.8.-2" -> "0.8.0-beta.2")
if [[ -n $(echo "$VERSION" | grep "-") ]]; then
   BASEVER="$(echo "$VERSION" | sed 's/\.-.*//')"
   BETAVER="$(echo "$VERSION" | sed 's/.*-//')"
   VERSION="$BASEVER.0-beta.$BETAVER"
   BETA=true
fi

# Download and Install mingw-ldd
echo "Update pip"
pip3 install --upgrade pip

echo "Install mingw-ldd script"
pip3 install mingw-ldd

# Staging directory the installer and zip are built from.
STAGING="extras/windows/installer/bin"

# Move compiled binary to staging folder.
echo "creating staging area"
mkdir -p "$STAGING"

# Move data to staging folder
echo "moving data to staging area"
cp -r dat/ "$STAGING"
cp AUTHORS "$STAGING"
cp VERSION "$STAGING"

# Collect DLLs. ARCH is guaranteed to be "32" or "64" here, so the two
# originally-duplicated loops collapse into one.
for fn in $(mingw-ldd naev.exe --dll-lookup-dirs "/mingw$ARCH/bin" | grep -i "mingw$ARCH" | cut -f1 -d"/" --complement); do
   fp="/$fn"
   echo "copying $fp to staging area"
   cp "$fp" "$STAGING"
done

echo "copying naev binary to staging area"
if [[ $NIGHTLY == true ]]; then
   cp src/naev.exe "$STAGING/naev-$VERSION-$BUILD_DATE-win$ARCH.exe"
else
   cp src/naev.exe "$STAGING/naev-$VERSION-win$ARCH.exe"
fi

# Create distribution folder
echo "creating distribution folder"
mkdir -p dist/release

# Work out the version/suffix handed to NSIS. The four original
# nightly/beta combinations collapse into these two variables:
#   beta:    VERSION=<base>.0        SUFFIX=-beta.<n>
#   stable:  VERSION=<version>       SUFFIX=(empty)
# and nightly builds append -<build date> to the suffix.
if [[ $BETA == true ]]; then
   NSIS_VERSION="$BASEVER.0"
   NSIS_SUFFIX="-beta.$BETAVER"
else
   NSIS_VERSION="$VERSION"
   NSIS_SUFFIX=""
fi
if [[ $NIGHTLY == true ]]; then
   NSIS_SUFFIX="$NSIS_SUFFIX-$BUILD_DATE"
fi

# Build installer
makensis -DVERSION="$NSIS_VERSION" -DVERSION_SUFFIX="$NSIS_SUFFIX" -DARCH="$ARCH" extras/windows/installer/naev.nsi

# Move installer to distribution directory
mv "extras/windows/installer/naev-$NSIS_VERSION$NSIS_SUFFIX-win$ARCH.exe" "dist/release/naev-win$ARCH.exe"

echo "Successfully built Windows Installer for win$ARCH"

# Package zip (subshell so the caller's working directory is untouched)
(
   cd "$STAGING" &&
   zip "../../../../dist/release/naev-win$ARCH.zip" *.dll *.exe
)

echo "Successfully packaged zipped folder for win$ARCH"

echo "Cleaning up staging area"
rm -rf "$STAGING"
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Layout for Part9Activity ("Retain configuration instance"): a vertical
     stack with a static caption above a large, centered value display. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    tools:context="it.tiwiz.rxjavacrunch.part9.Part9Activity">

    <!-- Static caption. NOTE(review): hard-coded text should probably move to
         a @string resource for localization - confirm project convention. -->
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Retain configuration instance (value)" />

    <!-- Displays the retained value; populated at runtime via its id.
         tools:text is a design-time preview only and is not shown on device. -->
    <TextView
        android:id="@+id/currentValue"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:textAppearance="?android:textAppearanceLarge"
        android:padding="@dimen/activity_vertical_margin"
        android:gravity="center"
        tools:text="10"/>
</LinearLayout>
| {
"pile_set_name": "Github"
} |
--- contrib/virt.te 2012-11-25 21:35:09.181247450 +0100
+++ contrib/virt.te 2012-11-25 21:34:09.223216815 +0100
@@ -281,7 +281,11 @@
userdom_search_user_home_dirs(virt_domain)
userdom_read_all_users_state(virt_domain)
-qemu_exec(virt_domain)
+ifdef(`distro_gentoo',`
+ optional_policy(`
+ qemu_exec(virt_domain)
+ ')
+')
tunable_policy(`virt_use_execmem',`
allow virt_domain self:process { execmem execstack };
| {
"pile_set_name": "Github"
} |
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// +k8s:deepcopy-gen=package
// +k8s:protobuf-gen=package
// +k8s:openapi-gen=true
// +groupName=coordination.k8s.io
//
// Package v1 holds the v1 version of the coordination.k8s.io API group.
// The +k8s tags above drive deepcopy, protobuf, and OpenAPI code generation.
package v1 // import "k8s.io/api/coordination/v1"
| {
"pile_set_name": "Github"
} |
/* eslint-disable eslint-comments/disable-enable-pair */
/* eslint-disable import/no-mutable-exports */

// Module-level cache of the most recently resolved authority.
// 'NULL' is the sentinel for "no authority". Exported as a mutable
// (live) binding so importers always observe the latest value.
let CURRENT = 'NULL';

/**
 * Wraps an `Authorized` component/value so that calling the returned function
 * resolves and caches the current authority, then hands `Authorized` back.
 *
 * use authority or getAuthority
 * @param {string | string[] | (() => string | string[])} currentAuthority -
 *   a role string, an array of role strings, or a function returning either.
 */
const renderAuthorize = Authorized => currentAuthority => {
  if (currentAuthority) {
    // A function is invoked lazily to obtain the actual authority value.
    if (typeof currentAuthority === 'function') {
      CURRENT = currentAuthority();
    }
    // Strings (including String objects, hence the toString check) and
    // arrays are stored as-is. Other truthy types leave CURRENT unchanged.
    if (
      Object.prototype.toString.call(currentAuthority) === '[object String]' ||
      Array.isArray(currentAuthority)
    ) {
      CURRENT = currentAuthority;
    }
  } else {
    // Falsy input resets the cache to the sentinel.
    CURRENT = 'NULL';
  }
  return Authorized;
};

export { CURRENT };
// FIX(review): the previous `Authorized => renderAuthorize(Authorized)`
// default export was an identity wrapper; export the function directly.
export default renderAuthorize;
| {
"pile_set_name": "Github"
} |