/**
* Enum for isolation levels
* @readonly
* @enum {number}
*/
module.exports = {
// Makes all records visible, including uncommitted transactional records
READ_UNCOMMITTED: 0,
// Only non-transactional and COMMITTED transactional records are visible. Returns all data
// from offsets smaller than the current LSO (last stable offset), and enables the inclusion of
// the list of aborted transactions in the result, which allows consumers to discard ABORTED
// transactional records
READ_COMMITTED: 1,
}
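The `READ_COMMITTED` behaviour described in the comments can be illustrated with a small sketch (written in Python purely for illustration; `records`, `lso`, and `aborted_producer_ids` are hypothetical names, and this is not the library's actual fetch logic): only offsets below the last stable offset are returned, and records belonging to aborted transactions are discarded. Under `READ_UNCOMMITTED`, no filtering is applied at all.

```python
# Toy sketch of READ_COMMITTED filtering, assuming each record is a dict with an
# "offset" and, for transactional records, a "producer_id". Not the real implementation.
def read_committed(records, lso, aborted_producer_ids):
    visible = []
    for record in sorted(records, key=lambda r: r["offset"]):
        if record["offset"] >= lso:
            break  # only data from offsets smaller than the LSO is visible
        if record.get("producer_id") in aborted_producer_ids:
            continue  # consumers discard ABORTED transactional records
        visible.append(record)
    return visible
```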
|
class CommentsController < ApplicationController
post '/reviews/:slug/comments' do
if logged_in?
if @review = Review.find_by_slug(params[:slug])
comment = @review.comments.new(content: params[:comment][:content], user: current_user)
if comment.save
flash[:message] = "Successfully submitted a comment"
else
flash[:message] = "Please add a comment"
end
redirect to "/reviews/#{@review.slug}"
else
redirect to '/reviews'
end
else
redirect to '/login'
end
end
end
|
package org.apache.hadoop.hdfs.notifier.server;
public class EmptyServerClientTracker implements IServerClientTracker {
@Override
public void run() {}
@Override
public void setClientTimeout(long timeout) {}
@Override
public void setHeartbeatTimeout(long timeout) {}
@Override
public void handleFailedDispatch(long clientId, long lastFailed) {}
@Override
public void handleSuccessfulDispatch(long clientId, long lastSent) {}
}
|
unit MVVM.Bindings.Commands;
interface
uses
System.Actions,
MVVM.Interfaces;
type
TBindingCommandAction = class(TBindingCommandBase<TContainedAction>)
protected
procedure DoEnabled; override;
procedure DoDisabled; override;
public
procedure Execute; override;
end;
implementation
{ TBindingCommandAction }
procedure TBindingCommandAction.DoDisabled;
begin
FCommand.Enabled := False;
end;
procedure TBindingCommandAction.DoEnabled;
begin
FCommand.Enabled := True;
end;
procedure TBindingCommandAction.Execute;
begin
if Enabled then
if CanExecute then
FCommand.Execute;
end;
end.
|
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow;
import org.tensorflow.op.Op;
import org.tensorflow.types.family.TType;
/**
* Interface implemented by operands of a TensorFlow operation.
*
* <p>Example usage:
*
* <pre>{@code
* Ops tf = Ops.create();
*
* // The "decodeJpeg" operation can be used as an operand to the "cast" operation
* Operand<TUint8> decodeJpeg = tf.image.decodeJpeg(...);
* tf.dtypes.cast(decodeJpeg, TFloat32.DTYPE);
*
* // The output "y" of the "unique" operation can be used as an operand to the "cast" operation
* Output<TInt32> y = tf.unique(...).y();
* tf.dtypes.cast(y, TFloat32.DTYPE);
*
* // The "split" operation can be used as operand list to the "concat" operation
* Iterable<? extends Operand<TFloat32>> split = tf.split(...);
* tf.concat(split, tf.constant(0));
* }</pre>
*/
public interface Operand<T extends TType> extends Op {
/**
* Returns the symbolic handle of the tensor.
*
* <p>Inputs to TensorFlow operations are outputs of another TensorFlow operation. This method is
* used to obtain a symbolic handle that represents the computation of the input.
*
* @see OperationBuilder#addInput(Output)
*/
Output<T> asOutput();
/**
* Returns this operand as a tensor.
*
* <i>Only works when running in an eager execution environment.</i>
* <p>This helper method is equivalent to {@code asOutput().tensor()}.
*
* @return the tensor
* @throws IllegalStateException if this is an operand of a graph
*/
default Tensor<T> asTensor() {
return asOutput().tensor();
}
/**
* Returns the data of this operand.
*
* <i>Only works when running in an eager execution environment.</i>
* <p>This helper method is equivalent to {@code asTensor().data()}.
*
* @return the tensor data
* @throws IllegalStateException if this is an operand of a graph
*/
default T data() {
return asOutput().tensor().data();
}
}
|
---
date: 2018-02-28
title: Changing your password
categories:
- account
description: How to change your account's password
type: Document
---
## Log in
Log in to your account at [https://roburst.co](https://roburst.co).
In the **Settings** section, click _Profile_.
## Change password
Click _Change password_.
Enter your old password, then your new password.
## Finish
Congratulations! Your password has been successfully changed.
|
use std::{any::Any, borrow::Cow, fmt::Debug};
use figures::{Point, Points, Size};
use crate::{
styles::style_sheet::Classes, AnyFrontend, Pixels, StyledWidget, Widget, WidgetRegistration,
WidgetStorage, ROOT_CLASS,
};
type InitializerFn<W> = dyn FnOnce(&WidgetStorage) -> StyledWidget<W>;
/// A builder for a Window.
#[must_use]
pub struct WindowBuilder<W: Widget> {
/// The function that creates the root widget for this window.
pub initializer: Option<Box<InitializerFn<W>>>,
/// The initial configuration of the window.
pub configuration: WindowConfiguration,
}
/// Configuration options used when opening a window.
#[derive(Clone, Debug)]
#[allow(clippy::struct_excessive_bools)]
pub struct WindowConfiguration {
/// The title of the window. If not set, "Gooey - Kludgine" will be used.
pub title: Option<String>,
/// The initial position of the window. If `None`, the system will place the
/// window using its default behavior. The point is in screen coordinates, relative to the
/// top-left of the primary display. Coordinates can be negative.
pub position: Option<Point<i32, Pixels>>,
/// The initial size of the window. The default value is `Size::new(1024,
/// 768)`.
pub size: Size<u32, Points>,
/// If true, the window can be resized by the user. Defaults to true.
pub resizable: bool,
/// If true, the window will start maximized. Defaults to false.
pub maximized: bool,
/// If true, regions where the background color is transparent will show the
/// content behind the window. Defaults to false.
pub transparent: bool,
/// Determines whether the window should have its normal decorations, such
/// as the title bar and border.
pub decorations: bool,
/// Sets whether the window should always be on top of other windows.
pub always_on_top: bool,
}
impl Default for WindowConfiguration {
fn default() -> Self {
Self {
title: None,
position: None,
size: Size::new(1024, 768),
resizable: true,
maximized: false,
transparent: false,
decorations: true,
always_on_top: false,
}
}
}
impl<W: Widget> Debug for WindowBuilder<W> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("WindowBuilder").finish_non_exhaustive()
}
}
impl<W: Widget> WindowBuilder<W> {
/// Creates a new builder with `initializer` used to create the root widget.
pub fn new<F: FnOnce(&WidgetStorage) -> StyledWidget<W> + 'static>(initializer: F) -> Self {
Self {
initializer: Some(Box::new(initializer)),
configuration: WindowConfiguration::default(),
}
}
/// Sets the window's title.
pub fn title(mut self, title: impl Into<String>) -> Self {
self.configuration.title = Some(title.into());
self
}
/// Sets the window's position (in screen coordinates).
pub fn position(mut self, location: impl Into<Point<i32, Pixels>>) -> Self {
self.configuration.position = Some(location.into());
self
}
/// Sets the window's size.
pub fn size(mut self, size: impl Into<Size<u32, Points>>) -> Self {
self.configuration.size = size.into();
self
}
/// Prevents the window from being resized.
pub fn non_resizable(mut self) -> Self {
self.configuration.resizable = false;
self
}
/// Maximizes the window upon opening.
pub fn maximize(mut self) -> Self {
self.configuration.maximized = true;
self
}
/// Enables transparent window handling, if the platform supports it.
/// Background colors that have transparency will allow other content to
/// show through.
pub fn transparent(mut self) -> Self {
self.configuration.transparent = true;
self
}
/// Removes decorations from the window (such as the title bar).
pub fn plain(mut self) -> Self {
self.configuration.decorations = false;
self
}
/// Sets that the window should stay on top of all other windows.
pub fn always_on_top(mut self) -> Self {
self.configuration.always_on_top = true;
self
}
/// Opens the window. Only possible on platforms that support multiple windows.
#[allow(clippy::must_use_candidate)]
pub fn open(self, frontend: &dyn AnyFrontend) -> bool {
frontend.open(Box::new(self))
}
}
/// A [`WindowBuilder`] that has had its widget type parameter erased.
pub trait AnyWindowBuilder: Any {
/// Casts this value to a mutable [`Any`] reference.
fn as_mut_any(&mut self) -> &mut dyn Any;
/// Returns the window configuration.
fn configuration(&self) -> WindowConfiguration;
/// Builds the window's root content and returns the registration.
fn build(&mut self, storage: &WidgetStorage) -> WidgetRegistration;
}
impl<W: Widget> AnyWindowBuilder for WindowBuilder<W> {
fn as_mut_any(&mut self) -> &mut dyn Any {
self
}
fn configuration(&self) -> WindowConfiguration {
self.configuration.clone()
}
fn build(&mut self, storage: &WidgetStorage) -> WidgetRegistration {
let initializer = self.initializer.take().expect("already built");
let mut root = initializer(storage);
// Append the root class to the root widget.
let mut classes = root.style.get::<Classes>().cloned().unwrap_or_default();
classes.insert(Cow::from(ROOT_CLASS));
root.style.push(classes);
storage.register(root)
}
}
|
# Android Development
## Android Studio Configuration

### Application Runner

> Important! Be sure to check the `Allow parallel run` checkbox.
```bash
# cd react-native-keychain
./gradlew :android
```
### Unit Tests

```bash
# cd react-native-keychain
./gradlew test
```
### Start React Native Metro Bundler

```bash
# cd react-native-keychain/KeychainExample
react-native start --reset-cache
```
```bash
# set working dir to: 'react-native-keychain/KeychainExample'
/usr/bin/env node node_modules/.bin/react-native start --reset-cache
```
> Important! Be sure to check the `Allow parallel run` checkbox.
### Create Automatic self-refreshed TCP ports binding

```bash
# brew install watch
/usr/local/bin/watch -n 5 "adb reverse tcp:8081 tcp:8081 && adb reverse tcp:8097 tcp:8097 && adb reverse --list"
```
> Important! Be sure to check the `Allow parallel run` checkbox.
### Source code synchronization task
Needed to automatically re-publish source code changes to the sample app:

The same results can be achieved by executing this command:
```bash
# cd react-native-keychain/KeychainExample
yarn --force
# cd react-native-keychain
./gradlew updateLibrarySourcesInExample
```
|
package com.tiernebre.zone_blitz.token.user_confirmation;
import com.tiernebre.zone_blitz.user.dto.UserDto;
import lombok.RequiredArgsConstructor;
import org.jooq.DSLContext;
import org.springframework.stereotype.Repository;
import java.util.Optional;
import static com.tiernebre.zone_blitz.jooq.Tables.USER_CONFIRMATION_TOKEN;
@Repository
@RequiredArgsConstructor
public class UserConfirmationTokenJooqRepository implements UserConfirmationTokenRepository {
private final DSLContext dslContext;
@Override
public UserConfirmationTokenEntity createOneForUser(UserDto user) {
return dslContext
.insertInto(USER_CONFIRMATION_TOKEN, USER_CONFIRMATION_TOKEN.USER_ID)
.values(user.getId())
.returningResult(USER_CONFIRMATION_TOKEN.asterisk())
.fetchOne()
.into(UserConfirmationTokenEntity.class);
}
@Override
public Optional<UserConfirmationTokenEntity> findOneForUser(UserDto user) {
return dslContext
.selectFrom(USER_CONFIRMATION_TOKEN)
.where(USER_CONFIRMATION_TOKEN.USER_ID.eq(user.getId()))
.fetchOptionalInto(UserConfirmationTokenEntity.class);
}
}
|
# investing-datascience-style
Watch this video for reference: https://youtu.be/4jaBKXDqg9U
## Required Packages:
1. numpy
2. pandas
3. matplotlib
4. datetime
5. time
6. yfinance
7. os
8. cufflinks
9. plotly
10. warnings
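As a quick sanity check before following along with the video, everything can be imported up front (a minimal sketch; note that `datetime`, `time`, `os`, and `warnings` ship with the Python standard library, so only the remaining packages need to be installed, e.g. with pip):

```python
# Minimal environment check: import every package listed above.
import datetime
import os
import time
import warnings

import cufflinks  # Plotly bindings for pandas DataFrames
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import plotly
import yfinance as yf

print("All required packages are available.")
```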
|
from redbot import core
from redbot.core import VersionInfo
def test_version_working():
assert hasattr(core, "__version__")
assert core.__version__[0] == "3"
# When adding more of these, ensure they are added in ascending order of precedence
version_tests = (
"3.0.0a32.post10.dev12",
"3.0.0rc1.dev1",
"3.0.0rc1",
"3.0.0",
"3.0.1",
"3.0.1.post1.dev1",
"3.0.1.post1",
"2018.10.6b21",
)
def test_version_info_str_parsing():
for version_str in version_tests:
assert version_str == str(VersionInfo.from_str(version_str))
def test_version_info_lt():
for next_idx, cur in enumerate(version_tests[:-1], start=1):
cur_test = VersionInfo.from_str(cur)
next_test = VersionInfo.from_str(version_tests[next_idx])
assert cur_test < next_test
def test_version_info_gt():
assert VersionInfo.from_str(version_tests[1]) > VersionInfo.from_str(version_tests[0])
|
export PIG_HOME=/usr/local/pig
export PIG_CONF_DIR=$PIG_HOME/conf
export PATH=${PIG_HOME}/bin:${PATH}
#export PIG_CLASSPATH=/usr/local/hadoop/conf
#export PATH=${PIG_HOME}/sbin:${PATH}
#export PATH=${PIG_HOME}/bin:${PIG_HOME}/sbin:${PATH}
|
import 'package:angular/angular.dart';
import 'package:angular_components/angular_components.dart';
import 'package:gurps_incantation_magic_model/incantation_magic.dart';
@Component(
selector: 'mjw-drawback-list-editor',
styleUrls: const ['spell_editor.css'],
directives: const <dynamic>[
coreDirectives,
materialDirectives,
materialInputDirectives,
MaterialNumberValueAccessor,
],
template: '''
<div class='left-component-wrap'>
<material-button icon class='add-btn material-list-item-secondary' (trigger)='addDrawback()'>
<glyph icon='add_circle'></glyph>
</material-button>
<div class='left-component subheading'>DRAWBACKS</div>
</div>
<div *ngFor='let item of drawbacks; let i = index' class='left-component-wrap'>
<span class="left-component">
<material-input style='width: 70%;' type='text' label="LIMITATION" floatingLabel [(ngModel)]="item.name">
</material-input>
<material-input style='width: 12%;' type="number" checkInteger trailingText="%" rightAlign
[(ngModel)]="item.level"></material-input>
</span>
<material-button icon class='remove-btn' (trigger)='drawbacks.removeAt(i)' style="margin-right: 24px;">
<glyph icon='remove_circle'></glyph>
</material-button>
</div>
<material-list></material-list>
''',
providers: const <dynamic>[materialProviders],
)
class DrawbackListEditor {
@Input()
List<TraitModifier> drawbacks;
void addDrawback() {
drawbacks.add(new TraitModifier('', null, 0));
}
}
|
package com.jjh.actors.classic
import akka.actor.{Actor, ActorSystem, Props}
object Calculator {
def props: Props = Props[Calculator]
}
class Calculator extends Actor {
def receive: PartialFunction[Any, Unit] = {
case x: Int =>
println("Calculator received: " + x)
var total = x
for (i <- 1 to x) total = total * i
println("Calculator processing completed: " + total)
}
}
object ActorPropertyFactoryApp extends App {
val system = ActorSystem("MyActorSystem")
val actor = system.actorOf(Calculator.props)
actor ! 4
println("Message sent")
//shutdown the actor system
system.terminate()
}
|
namespace HareDu.Tests
{
using System.Threading.Tasks;
using Extensions;
using Microsoft.Extensions.DependencyInjection;
using Model;
using NUnit.Framework;
[TestFixture]
public class BindingTests :
HareDuTesting
{
[Test]
public async Task Verify_able_to_get_all_bindings1()
{
var services = GetContainerBuilder("TestData/BindingInfo.json").BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.GetAll();
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasData);
Assert.AreEqual(12, result.Data.Count);
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.Data);
});
}
[Test]
public async Task Verify_able_to_get_all_bindings2()
{
var services = GetContainerBuilder("TestData/BindingInfo.json").BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.GetAllBindings();
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasData);
Assert.AreEqual(12, result.Data.Count);
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.Data);
});
}
[Test]
public async Task Verify_can_create_exchange_binding_without_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create("E2", "Q1", BindingType.Exchange, "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.That(request.BindingKey, Is.Empty.Or.Null);
Assert.IsNull(request.Arguments);
});
}
[Test]
public async Task Verify_can_create_exchange_binding_without_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBinding("E2", "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.That(request.BindingKey, Is.Empty.Or.Null);
Assert.IsNull(request.Arguments);
});
}
[Test]
public async Task Verify_can_create_queue_binding_without_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create("E2", "Q1", BindingType.Queue, "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.That(request.BindingKey, Is.Empty.Or.Null);
Assert.IsNull(request.Arguments);
});
}
[Test]
public async Task Verify_can_create_queue_binding_without_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue("E2", "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.That(request.BindingKey, Is.Empty.Or.Null);
Assert.IsNull(request.Arguments);
});
}
[Test]
public async Task Verify_can_create_exchange_binding_with_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create("E2", "Q1", BindingType.Exchange, "HareDu", "*.", x =>
{
x.Add("arg1", "value1");
});
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.AreEqual("*.", request.BindingKey);
Assert.AreEqual("value1", request.Arguments["arg1"].ToString());
});
}
[Test]
public async Task Verify_can_create_exchange_binding_with_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBinding("E2", "Q1", "HareDu", "*.", x =>
{
x.Add("arg1", "value1");
});
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.AreEqual("*.", request.BindingKey);
Assert.AreEqual("value1", request.Arguments["arg1"].ToString());
});
}
[Test]
public async Task Verify_can_create_queue_binding_with_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create("E2", "Q1", BindingType.Queue, "HareDu", "*.", x =>
{
x.Add("arg1", "value1");
});
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.AreEqual("*.", request.BindingKey);
Assert.AreEqual("value1", request.Arguments["arg1"].ToString());
});
}
[Test]
public async Task Verify_can_create_queue_binding_with_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue("E2", "Q1", "HareDu", "*.", x =>
{
x.Add("arg1", "value1");
});
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.IsNotNull(result.DebugInfo);
Assert.IsNotNull(result.DebugInfo.Request);
BindingRequest request = result.DebugInfo.Request.ToObject<BindingRequest>();
Assert.AreEqual("*.", request.BindingKey);
Assert.AreEqual("value1", request.Arguments["arg1"].ToString());
});
}
[Test]
public async Task Verify_cannot_create_exchange_binding_without_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create(string.Empty, "Q1", BindingType.Exchange, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_exchange_binding_without_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Create("E1", string.Empty, BindingType.Exchange, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_exchange_binding_without_arguments3()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBinding(string.Empty, "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_exchange_binding_without_arguments4()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBinding("E1", string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_queue_binding_without_arguments1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue(string.Empty, "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_queue_binding_without_arguments2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue("E1", string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_queue_binding_without_arguments3()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue(string.Empty, string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(2, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_create_queue_binding_without_arguments4()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue("E1", "Q1", string.Empty);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_can_delete_queue_binding1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete("E1", "Q1", string.Empty, "HareDu", BindingType.Queue);
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.AreEqual(0, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_can_delete_queue_binding2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBindingToQueue("E1", "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.AreEqual(0, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_can_delete_queue_binding3()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete("E1", "Q1", string.Empty, "HareDu", BindingType.Exchange);
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.AreEqual(0, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_can_delete_queue_binding4()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.CreateExchangeBinding("E1", "Q1", "HareDu");
Assert.Multiple(() =>
{
Assert.IsFalse(result.HasFaulted);
Assert.AreEqual(0, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete("E2", string.Empty, string.Empty, "HareDu", BindingType.Queue);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteQueueBinding("E2", string.Empty, string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding3()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete(string.Empty, string.Empty, string.Empty, "HareDu", BindingType.Queue);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(2, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding4()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteQueueBinding(string.Empty, string.Empty, string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(2, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding5()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete(string.Empty, string.Empty, string.Empty, string.Empty, BindingType.Queue);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(3, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_queue_binding6()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteQueueBinding(string.Empty, string.Empty, string.Empty, string.Empty);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(3, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding1()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete("E2", string.Empty, string.Empty, "HareDu", BindingType.Exchange);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding2()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteExchangeBinding("E2", string.Empty, string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(1, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding3()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete(string.Empty, string.Empty, string.Empty, "HareDu", BindingType.Exchange);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(2, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding4()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteExchangeBinding(string.Empty, string.Empty, string.Empty, "HareDu");
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(2, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding5()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.Object<Binding>()
.Delete(string.Empty, string.Empty, string.Empty, string.Empty, BindingType.Exchange);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(3, result.DebugInfo.Errors.Count);
});
}
[Test]
public async Task Verify_cannot_delete_exchange_binding6()
{
var services = GetContainerBuilder().BuildServiceProvider();
var result = await services.GetService<IBrokerObjectFactory>()
.DeleteExchangeBinding(string.Empty, string.Empty, string.Empty, string.Empty);
Assert.Multiple(() =>
{
Assert.IsTrue(result.HasFaulted);
Assert.AreEqual(3, result.DebugInfo.Errors.Count);
});
}
}
}
|
---
tech_name: NextJS
tech_logo: /img/nextjs.png
template_key: tech
---
|
/*
* @Author: 卓文理
* @Email: 531840344@qq.com
* @Date: 2017-09-01 17:00:53
*/
'use strict';
// This file ensures JSDOM is loaded before React is included
import 'helpers/cssModulesHook';
import 'helpers/globalJSDOM';
import nodeHookFilename from 'node-hook-filename';
process.env.DEBUG = false;
nodeHookFilename([ '.jpeg' ]);
|
//==============================================================================
// Copyright (c) 2018 - Thomas Retornaz //
// thomas.retornaz@mines-paris.org //
// Distributed under the Boost Software License, Version 1.0. //
// See accompanying file LICENSE.txt or copy at //
// http://www.boost.org/LICENSE_1_0.txt //
//==============================================================================
#ifndef POUTRE_IPSECORE_HPP__
#define POUTRE_IPSECORE_HPP__
#include <poutreBase/poutreConfig.hpp>
#include <poutreBase/poutreTrace.hpp>
/**
* @file poutreImageProcessingSECore.hpp
* @author Thomas Retornaz
* @brief Structuring Element Import/Export
*
*
*/
#ifdef POUTRE_DYNAMIC // defined if POUTRE is compiled as a DLL
# ifdef PoutreIPSE_EXPORTS // defined if we are building the POUTRE DLL (instead of using it)
# define IPPSE_API MODULE_EXPORT
# else
# define IPPSE_API MODULE_IMPORT
# endif // POUTRE_DLL_EXPORTS
# define IPPSE_LOCAL MODULE_LOCAL
#else // POUTRE_DLL is not defined: this means POUTRE is a static lib.
# define IPPSE_API
# define IPPSE_LOCAL
#endif // POUTRE_DLL
namespace poutre
{
// TODO Doxydoc group
}
#endif // POUTRE_IPSECORE_HPP__
|
# $Id: findmail.pl 824 2010-01-15 13:28:47Z tglase $
#-
# Copyright © 2009
# mirabilos <t.glaser@tarent.de>
# All rights reserved.
#-
# Derived from Email::Find 0.10
#
# Copyright 2000, 2001 Michael G Schwern <schwern@pobox.com>.
# All rights reserved.
#
# Current maintainer is Tatsuhiko Miyagawa <miyagawa@bulknews.net>.
#
# This module is free software; you may redistribute it and/or modify it
# under the same terms as Perl itself.
use strict;
# Need qr//.
require 5.005;
# This is the BNF from RFC 822
my $esc = '\\\\';
my $period = '\.';
my $space = '\040';
my $open_br = '\[';
my $close_br = '\]';
my $nonASCII = '\x80-\xff';
my $ctrl = '\000-\037';
my $cr_list = '\n\015';
my $qtext = qq/[^$esc$nonASCII$cr_list\"]/; #"
my $dtext = qq/[^$esc$nonASCII$cr_list$open_br$close_br]/;
my $quoted_pair = qq<$esc>.qq<[^$nonASCII]>;
my $atom_char = qq/[^($space)<>\@,;:\".$esc$open_br$close_br$ctrl$nonASCII]/; #"
my $atom = qq<$atom_char+(?!$atom_char)>;
my $quoted_str = qq<\"$qtext*(?:$quoted_pair$qtext*)*\">; #"
my $word = qq<(?:$atom|$quoted_str)>;
my $local_part = qq<$word(?:$period$word)*>;
# This is a combination of the domain name BNF from RFC 1035 plus the
# domain literal definition from RFC 822, but allowing domains starting
# with numbers.
my $label = q/[A-Za-z\d](?:[A-Za-z\d-]*[A-Za-z\d])?/;
my $domain_ref = qq<$label(?:$period$label)*>;
my $domain_lit = qq<$open_br(?:$dtext|$quoted_pair)*$close_br>;
my $domain = qq<(?:$domain_ref|$domain_lit)>;
# Finally, the address-spec regex (more or less)
my $Addr_spec_re = qr<$local_part\s*\@\s*$domain>;
sub matched {
my $orig_match = $1;
my $end_cruft = '';
if( $orig_match =~ s|([),.'";?!]+)$|| ) { #"')){
$end_cruft = $1;
}
print $orig_match."\n";
$end_cruft;
}
while (<>) {
my $r_text = $_;
$r_text =~ s{($Addr_spec_re)}{
matched($1);
}eg;
}
|
RSpec.describe Metasploit::Model::Search::Operator::Group::Base, type: :model do
subject(:operator) do
described_class.new
end
let(:formatted_value) do
'formatted_value'
end
context '#children' do
subject(:children) do
operator.children(formatted_value)
end
it 'should be abstract' do
expect {
children
}.to raise_error(NotImplementedError)
end
end
context '#operate_on' do
subject(:operation) do
operator.operate_on(formatted_value)
end
#
# lets
#
let(:children) do
[
invalid_child,
valid_child
]
end
let(:valid_child) do
double('Valid Child', valid?: true)
end
let(:invalid_child) do
double('Invalid Child', valid?: false)
end
#
# Callbacks
#
before(:example) do
allow(operator).to receive(:children).and_return(children)
end
it { is_expected.to be_a Metasploit::Model::Search::Operation::Group::Base }
context 'children' do
subject(:operation_children) do
operation.children
end
it 'rejects invalid children' do
expect(operation_children).not_to include(invalid_child)
end
it 'includes valid children' do
expect(operation_children).to include(valid_child)
end
end
context 'operator' do
subject(:operation_operator) do
operation.operator
end
it 'should be the operator itself' do
expect(operation_operator).to eq(operator)
end
end
context 'value' do
subject(:value) do
operation.value
end
it 'should be formatted value' do
expect(value).to eq(formatted_value)
end
end
end
end
|
#!/usr/bin/perl
use strict;
use warnings;
#Get data names
opendir (DIR, ".\/Results\/4_1_Annotation") or die ("error:$!");
my @read = readdir DIR;
my %file;
foreach (@read) {
if ($_ =~ /(.+)_Representative_seq/){$file{$1}++;}
}
closedir DIR;
print "============================================================\n";
print " 5_1_Fasta_for_Phylogenetic_Analysis \n";
print "============================================================\n";
#Annotation
mkdir ".\/Results\/5_1_Fasta_for_Phylogenetic_Analysis";
my (%merged, %all);
foreach(sort keys %file){
my $file = $_;
open (DATA, "<", "./Results/4_1_Annotation\/${file}_Representative_seq.fas") or die("error:$!");
my $fname;
while(<DATA>){
chomp($_);
if($_ =~ /^>(.+)/){$fname = $1 . "_$file";}
else{$merged{$_}{$fname}++; $all{$fname} = $_;}
}
close(DATA);
}
open (OUT, ">", ".\/Results\/5_1_Fasta_for_Phylogenetic_Analysis\/all_representative_seqs.fas") or die("error:$!");
foreach(sort keys %all){print OUT ">$_\n$all{$_}\n";}
close(OUT);
open (OUT2, ">", ".\/Results\/5_1_Fasta_for_Phylogenetic_Analysis\/merged_seq.fas") or die("error:$!");
open (OUT3, ">", ".\/Results\/5_1_Fasta_for_Phylogenetic_Analysis\/merged_list.txt") or die("error:$!");
my (@narabi1, @narabi2, %haplo);
foreach(keys %merged){
my $seq = $_;
my $hash = $merged{$_};
my %names = %$hash;
my @keys = keys %names;
my @narabikae;
foreach(sort keys %file){
my $itiji = $_;
foreach(@keys){
if($_ =~ /$itiji/){push (@narabikae, $_); last;}
}
}
my $num = @keys;
if($num > 1){
my ($temp, $temp2);
$keys[0] =~ /(.+)_Uniq\d+_\d+_reads/;
$temp = $1;
$temp =~ s/_otu\d+//;
$haplo{$temp}++;
if($haplo{$temp} > 1){
push(@narabi1, ">${temp}_h$haplo{$temp}_from_${num}_sites\n$seq\n");
$temp2 = ">${temp}_h$haplo{$temp}_from_${num}_sites\n";
}else{
push(@narabi1, ">${temp}_from_${num}_sites\n$seq\n");
$temp2 = ">${temp}_from_${num}_sites\n";
}
foreach(@narabikae){$temp2 = $temp2 . "\t$_\n";}
push(@narabi2, $temp2);
}else{
push(@narabi1, ">$keys[0]\n$seq\n");
}
}
foreach(sort @narabi1){print OUT2 "$_";}
foreach(sort @narabi2){print OUT3 "$_";}
my $count = @narabi1;
print "$count Merged Sequences\n";
close(OUT2);
close(OUT3);
|
package nakadi.metrics.dropwizard;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import nakadi.MetricCollector;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class MetricsCollectorDropwizardTest {
private MetricRegistry metricRegistry = new MetricRegistry();
@Test
public void metricNames() {
assertEquals("event-type-thing",
MetricsCollectorDropwizard.scrubEventTypeName("event:type:thing"));
assertEquals("event-type-thing",
MetricsCollectorDropwizard.scrubEventTypeName("event type thing"));
assertEquals("event-type-thing",
MetricsCollectorDropwizard.scrubEventTypeName("event.type.thing"));
assertEquals("event--type--thing--is",
MetricsCollectorDropwizard.scrubEventTypeName("event::type..thing is"));
}
@Test
public void metrics() {
MetricsCollectorDropwizard metrics = new MetricsCollectorDropwizard("woo", metricRegistry);
metrics.duration(MetricCollector.Timer.eventSend, 10000,
TimeUnit.NANOSECONDS);
assertEquals(1, metricRegistry.getTimers().entrySet().size());
Map<String, Timer> timers = metricRegistry.getTimers();
String nameEventSendTime =
MetricsCollectorDropwizard.name("woo", MetricCollector.Timer.eventSend.path());
assertTrue(timers.containsKey(nameEventSendTime));
assertTrue(10000.0 == timers.get(nameEventSendTime).getSnapshot().getMean());
metrics.mark(MetricCollector.Meter.sent);
metrics.mark(MetricCollector.Meter.sent, 100);
Map<String, Meter> meters = metricRegistry.getMeters();
Set<Map.Entry<String, Meter>> entries = meters.entrySet();
assertEquals(1, entries.size());
String nameEventSent =
MetricsCollectorDropwizard.name("woo", MetricCollector.Meter.sent.path());
assertTrue(meters.containsKey(nameEventSent));
assertEquals(101, meters.get(nameEventSent).getCount());
metrics.mark(MetricCollector.Meter.http409);
meters = metricRegistry.getMeters();
entries = meters.entrySet();
assertEquals(2, entries.size());
String name409 =
MetricsCollectorDropwizard.name("woo", MetricCollector.Meter.http409.path());
assertTrue(meters.containsKey(name409));
assertTrue(1 == meters.get(name409).getCount());
}
}
|
-- |
-- Module : Ch03.MeanList
-- Description : Exercise 3 mean of a list
-- Copyright : erlnow 2020 - 2030
-- License : BSD3
--
-- Maintainer : erlestau@gmail.com
-- Stability : experimental
-- Portability : unknown
--
-- Exercise 3 from Chaper 3: Defining Types, Streamlining Functions
module Ch03.MeanList where
-- *Exercise 3, p. 69
--
-- $ex3
--
-- Write a function that computes the mean of a list, i.e., the sum of all
-- elements in the list divided by its length. (You may need to use the
-- 'fromIntegral' function to convert the length of the list from an integer into a
-- floating-point number.)
--
-- >>> mean [4.0, 5.0, 6.0, 3.0, 7.0]
-- 5.0
mean :: [Double] -> Double
mean xs = sum xs / fromIntegral (length xs)
|
using System;
using System.Collections.Generic;
using System.Reflection;
namespace ByteFlow.Protocol
{
internal class ByteProtoTargetDescriptor
{
public Type Type { get; }
public ByteProtoEntityAttribute EntityAttribute { get; }
public List<ByteProtoTargetPropertyDescriptor> PropertyDescriptors { get; }
public ByteProtoTargetDescriptor(Type targetType, ByteProtoEntityAttribute attr)
{
this.Type = targetType;
this.EntityAttribute = attr;
this.PropertyDescriptors = new List<ByteProtoTargetPropertyDescriptor>();
var properties = targetType.GetProperties();
foreach (var p in properties)
{
var memAttr = p.GetCustomAttribute<ByteProtoMemberAttribute>();
if (memAttr is null)
{
continue;
}
var desc = new ByteProtoTargetPropertyDescriptor(p, memAttr);
this.PropertyDescriptors.Add(desc);
}
this.PropertyDescriptors.Sort((a, b) => a.MemberAttribute.Order - b.MemberAttribute.Order);
}
}
}
|
module Govspeak
class TemplateRenderer
attr_reader :template, :locale
def initialize(template, locale)
@template = template
@locale = locale
end
def render(locals)
template_binding = binding
locals.each { |k, v| template_binding.local_variable_set(k, v) }
erb = ERB.new(File.read(__dir__ + "/../templates/#{template}"))
erb.result(template_binding)
end
def t(*args)
options = args.last.is_a?(Hash) ? args.last.dup : {}
key = args.shift
I18n.t!(key, options.merge(locale: locale))
end
def format_with_html_line_breaks(string)
ERB::Util.html_escape(string || "").strip.gsub(/(?:\r?\n)/, "<br/>").html_safe
end
end
end
|
<?php
/*
* This file is part of the Yosymfony\Spress.
*
* (c) YoSymfony <http://github.com/yosymfony>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Yosymfony\Spress\Tests\Plugin;
use PHPUnit\Framework\TestCase;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Output\ConsoleOutput;
use Yosymfony\Spress\Plugin\CommandPlugin;
use Yosymfony\Spress\Plugin\Environment\SymfonyCommandEnvironment;
class CommandPluginTest extends TestCase
{
public function testEmptyPlugin()
{
$commandPlugin = new CommandPlugin();
$this->assertTrue(is_array($commandPlugin->getMetas()));
$this->assertCount(0, $commandPlugin->getMetas());
}
public function testGetCommandEnvironment()
{
$commandPlugin = new CommandPlugin();
$this->assertInstanceOf('Yosymfony\Spress\Plugin\Environment\CommandEnvironmentInterface', $commandPlugin->getCommandEnvironment());
}
public function testSetCommandEnvironment()
{
$command = new Command('acme');
$command->setCode(function ($input, $output) {
$output->writeln('acme');
});
$commandPlugin = new CommandPlugin();
$commandPlugin->setCommandEnvironment(new SymfonyCommandEnvironment($command, new ConsoleOutput()));
$this->assertInstanceOf('Yosymfony\Spress\Plugin\Environment\CommandEnvironmentInterface', $commandPlugin->getCommandEnvironment());
}
/**
* @expectedException \RuntimeException
* @expectedExceptionMessage You must override the "getCommandDefinition" method in the concrete command plugin class.
*/
public function testCommandDefinitionNotOverrided()
{
$commandPlugin = new CommandPlugin();
$commandPlugin->getCommandDefinition();
}
/**
* @expectedException \RuntimeException
* @expectedExceptionMessage You must override the "executeCommand" method in the concrete command plugin class.
*/
public function testExecuteCommandNotOverrided()
{
$io = $this->getMockBuilder('Yosymfony\Spress\Core\IO\IOInterface')->getMock();
$commandPlugin = new CommandPlugin();
$commandPlugin->executeCommand($io, [], []);
}
}
|
%%%-------------------------------------------------------------------
%%% @author zhaoweiguo
%%% @copyright (C) 2019, <COMPANY>
%%% @doc
%%% Test what happens when processes are spawned without limit
%%% @cmd
%%% Run: erl> spawn_loop_infinite:loop(10000, 0).
%%% @end
%%% Created : 15. Feb 2019 6:16 PM
%%%-------------------------------------------------------------------
-module(spawn_loop_infinite).
-author("zhaoweiguo").
%% API
-export([loop/2]).
-export([do_nothing/0]).
loop(N, N) ->
io:format("stop~n");
loop(M, N) ->
Add = 32768*10,
io:format("~p;", [N]),
{ok, F} = file:open("fff"++ integer_to_list(N) ++".txt", [append]),
loop1(Add, N, F),
file:close(F),
timer:sleep(10),
loop(M, N+1).
loop1(0, _N, _F) ->
ok;
loop1(Add, N, F) ->
Pid = spawn(spawn_loop_infinite, do_nothing, []),
file:write(F, io_lib:format("[~p]:(~p)~n", [Add, Pid])),
loop1(Add-1, N, F).
do_nothing() ->
ok.
|
require 'peeptools/folder'
require 'peeptools/gopro_folder'
module Peep
class VolumeFinder
attr_reader :options
def initialize opts = {}
@options = opts
end
def volumes_folder
Folder.new(options[:volumes_folder] || '/Volumes')
end
def folder
folders.first
end
def folders
volumes_folder.folders.map do |folder|
f = Peep::GoproFolder.new(folder.full_path)
f.is_gopro? ? f : nil
end.compact
end
end
end
# class VolumeFinder
# attr_reader :options
# def initialize opts = {}
# @options = opts
# end
# def logger
# @logger ||= options[:logger] || Logger.new(STDOUT).tap do |l|
# l.level = Logger::DEBUG
# end
# end
# def matcher
# options[:volume_matcher] || CONFIG[:volume_matcher]
# end
# def folders
# @folders ||= Dir.glob('/Volumes/*')
# end
# def matching_folders
# folders.select{|f| f =~ matcher}
# end
# def folder
# return matching_folders.first if matching_folders.length == 1
# raise 'Volume not found' if matching_folders.length == 0
# raise "More than one matching volume found: #{matching_folders.inspect}"
# end
# end
|
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.mvc;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.junit.Test;
import org.springframework.boot.actuate.endpoint.AbstractEndpoint;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.servlet.HandlerInterceptor;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link AbstractEndpointHandlerMapping}.
*
* @author Madhura Bhave
*/
public abstract class AbstractEndpointHandlerMappingTests {
private final StaticApplicationContext context = new StaticApplicationContext();
@Test
public void securityInterceptorShouldBePresentForNonCorsRequest() throws Exception {
HandlerInterceptor securityInterceptor = mock(HandlerInterceptor.class);
TestActionEndpoint endpoint = new TestActionEndpoint(new TestEndpoint("a"));
AbstractEndpointHandlerMapping<?> mapping = new TestEndpointHandlerMapping<TestActionEndpoint>(
Collections.singletonList(endpoint));
mapping.setApplicationContext(this.context);
mapping.setSecurityInterceptor(securityInterceptor);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("POST", "/a")).getInterceptors())
.contains(securityInterceptor);
}
@Test
public void securityInterceptorIfNullShouldNotBeAdded() throws Exception {
TestActionEndpoint endpoint = new TestActionEndpoint(new TestEndpoint("a"));
AbstractEndpointHandlerMapping<?> mapping = new TestEndpointHandlerMapping<TestActionEndpoint>(
Collections.singletonList(endpoint));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandler(request("POST", "/a")).getInterceptors()).isNull();
}
@Test
public void securityInterceptorShouldBePresentAfterCorsInterceptorForCorsRequest()
throws Exception {
HandlerInterceptor securityInterceptor = mock(HandlerInterceptor.class);
TestActionEndpoint endpoint = new TestActionEndpoint(new TestEndpoint("a"));
AbstractEndpointHandlerMapping<?> mapping = new TestEndpointHandlerMapping<TestActionEndpoint>(
Collections.singletonList(endpoint));
mapping.setApplicationContext(this.context);
mapping.setSecurityInterceptor(securityInterceptor);
mapping.afterPropertiesSet();
MockHttpServletRequest request = request("POST", "/a");
request.addHeader("Origin", "http://example.com");
assertThat(mapping.getHandler(request).getInterceptors().length).isEqualTo(2);
assertThat(mapping.getHandler(request).getInterceptors()[1])
.isEqualTo(securityInterceptor);
}
@Test
public void pathNotMappedWhenGetPathReturnsNull() throws Exception {
TestMvcEndpoint endpoint = new TestMvcEndpoint(new TestEndpoint("a"));
TestActionEndpoint other = new TestActionEndpoint(new TestEndpoint("b"));
AbstractEndpointHandlerMapping<?> mapping = new TestEndpointHandlerMapping<MvcEndpoint>(
Arrays.<MvcEndpoint>asList(endpoint, other));
mapping.setApplicationContext(this.context);
mapping.afterPropertiesSet();
assertThat(mapping.getHandlerMethods()).hasSize(1);
assertThat(mapping.getHandler(request("GET", "/a"))).isNull();
assertThat(mapping.getHandler(request("POST", "/b"))).isNotNull();
}
private MockHttpServletRequest request(String method, String requestURI) {
return new MockHttpServletRequest(method, requestURI);
}
private static class TestEndpoint extends AbstractEndpoint<Object> {
TestEndpoint(String id) {
super(id);
}
@Override
public Object invoke() {
return null;
}
}
private static class TestMvcEndpoint extends EndpointMvcAdapter {
TestMvcEndpoint(TestEndpoint delegate) {
super(delegate);
}
}
private static class TestActionEndpoint extends EndpointMvcAdapter {
TestActionEndpoint(TestEndpoint delegate) {
super(delegate);
}
@Override
@PostMapping
public Object invoke() {
return null;
}
}
private static class TestEndpointHandlerMapping<E extends MvcEndpoint>
extends AbstractEndpointHandlerMapping<E> {
TestEndpointHandlerMapping(Collection<E> endpoints) {
super(endpoints);
}
@Override
protected String getPath(MvcEndpoint endpoint) {
if (endpoint instanceof TestActionEndpoint) {
return super.getPath(endpoint);
}
return null;
}
}
}
|
use v6;
use Test;
plan 4;
{
my $measurements = Supply.new;
my %measured;
sub measure($test, $value) {
push %measured{$test}, $value;
}
$measurements.tap(-> $value {
measure "Measured", $value;
});
$measurements.more(1.5);
$measurements.more(2.3);
$measurements.more(4.6);
is_deeply %measured, {"Measured" => [1.5, 2.3, 4.6]}, 'supply - singular tap';
%measured = ();
$measurements.tap(-> $value {
measure "Also measured", $value;
});
$measurements.more(2.8);
is_deeply %measured, {"Measured" => [2.8], "Also measured" => [2.8]}, 'supply dual tap';
$measurements.grep(* > 4).tap(-> $value {
measure "HIGH", $value;
});
%measured = ();
$measurements.more(1.6);
is_deeply %measured, {"Measured" => [1.6], "Also measured" => [1.6]}, 'supply grep and tap';
%measured = ();
$measurements.more(4.5);
is_deeply %measured, {"Measured" => [4.5], "Also measured" => [4.5], "HIGH" => [4.5]}, 'supply grep and tap';
}
{
my $belt_raw = Supply.interval(.1).map({ rand xx 20 });
my $belt_avg = $belt_raw.map(sub (@values) {
([+] @values) / @values
});
my $belt_labeled = $belt_avg.map({ Belt => $_ });
my $samples = Supply.interval(.5).map({ rand });
my $samples_labeled = $samples.map({ Sample => $_});
my $merged = $belt_labeled.merge($samples_labeled);
## todo: use Test::Tap tap_ok
## $merged.tap(&say);
$belt_raw.done;
$samples.done;
}
|
require 'active_support/concern'
module Concerns::TwitterUser::RawAttrs
extend ActiveSupport::Concern
SAVE_KEYS = %i(
id
name
screen_name
location
description
url
protected
followers_count
friends_count
listed_count
favourites_count
utc_offset
time_zone
geo_enabled
verified
statuses_count
lang
status
profile_image_url_https
profile_banner_url
profile_link_color
suspended
entities
created_at
)
REJECT_KEYS = %i(
id
screen_name
url
created_at
)
METHOD_NAME_KEYS = SAVE_KEYS.reject { |k| k.in?(REJECT_KEYS) }
TIME_ZONE_MAPPING = {
'JST' => 'Asia/Tokyo',
'GMT+9' => 'Asia/Tokyo',
'Ulaan Bataar' => 'Asia/Ulaanbaatar',
'GMT-8' => 'America/Los_Angeles',
'Kiev' => 'Europe/Kiev',
'GMT-4' => 'America/Puerto_Rico'
}
class_methods do
def collect_user_info(t_user)
t_user.symbolize_keys.slice(*SAVE_KEYS).to_json
end
end
included do
attr_accessor :raw_attrs_text
delegate *METHOD_NAME_KEYS, to: :raw_attrs
end
# A url written on profile page as a home page url
def url
return nil if entities.nil? || entities.url.nil? || entities.url.urls.nil?
urls = entities.url.urls
urls.any? ? (urls[0].expanded_url || urls[0].url) : nil
rescue => e
logger.warn "#{e.class}: #{e.message} #{entities}"
nil
end
def account_created_at
at = raw_attrs[:created_at].to_s
if time_zone.present? && at.present?
ActiveSupport::TimeZone[TIME_ZONE_MAPPING[time_zone.to_s] || time_zone.to_s].parse(at)
elsif at.present?
Time.zone.parse(at)
else
nil
end
rescue => e
logger.info "#{self.class}##{__method__}: #{e.class} #{e.message} [#{time_zone}] [#{at}]"
nil
end
def profile_not_found?
if instance_variable_defined?(:@profile_not_found)
@profile_not_found
else
raw_attrs
@profile_not_found
end
end
private
def raw_attrs
if new_record?
Hashie::Mash.new(Oj.load(raw_attrs_text, symbol_keys: true))
else
if instance_variable_defined?(:@raw_attrs)
@raw_attrs
else
profile = Efs::TwitterUser.find_by(id)&.fetch(:profile, nil)
profile = Oj.load(profile, symbol_keys: true) if profile.class == String # Fix me.
if profile && profile.class == Hash && !profile.blank?
@profile_not_found = false
return (@raw_attrs = Hashie::Mash.new(profile))
end
profile = S3::Profile.find_by(twitter_user_id: id)
if !profile.blank? && !profile[:user_info].blank?
profile = Oj.load(profile[:user_info], symbol_keys: true)
@profile_not_found = false
return (@raw_attrs = Hashie::Mash.new(profile))
end
logger.warn "Profile not found in EFS and S3. #{id} #{sprintf("%.3f sec", Time.zone.now - created_at)}"
@profile_not_found = true
@raw_attrs = Hashie::Mash.new({})
end
end
end
end
|
require 'spec_helper'
describe "WinFfi::Table", :if => SpecHelper.adapter == :win_ffi do
before :each do
window = RAutomation::Window.new(:title => "MainFormWindow")
window.button(:value => "Data Entry Form").click { RAutomation::Window.new(:title => "DataEntryForm").exists? }
end
it "#table" do
table = RAutomation::Window.new(:title => "DataEntryForm").table(:id => "personListView")
table.should exist
RAutomation::Window.wait_timeout = 0.1
expect {RAutomation::Window.new(:title => "non-existent-window").
table(:class => /SysListView32/i)}.
to raise_exception(RAutomation::UnknownWindowException)
end
it "#strings" do
table = RAutomation::Window.new(:title => "DataEntryForm").table(:id => "personListView")
table.strings.should == [
["Name", "Date of birth", "State"],
["John Doe", "12/15/1967", "FL"],
["Anna Doe", "3/4/1975", ""]
]
end
it "#select" do
table = RAutomation::Window.new(:title => "DataEntryForm").table(:id => "personListView")
table.selected?(2).should == false
table.select(2)
table.selected?(2).should == true
end
end
|
PRINT 'Update Parcels'
-- Update the parcel project number.
UPDATE p SET
p.[ProjectNumbers] = '["' + b.[ProjectNumber] +'"]'
FROM dbo.[Parcels] p
INNER JOIN #Parcels b ON b.[Id] = p.[Id]
DROP TABLE #Parcels
|
import React, { FC, useContext, useEffect, useState } from 'react';
import { IFile } from '../../interfaces/IFile';
import api from '../../services/Axios';
import { AuthContext } from '../../services/Context';
import ListFiles from './files';
const UserFiles: FC = () => {
const { user, token } = useContext(AuthContext);
const [files, setFiles] = useState<IFile[]>();
useEffect(() => {
(async () => {
const { data } = await api.get(`files/user/${user?.id}`, {
headers: {
Authorization: token as string,
}
});
setFiles(data);
})();
}, [token, user]);
return (
<>
<br />
{files?
files.map(file => <ListFiles file={file} />) : ''
}
</>
)
}
export default UserFiles;
|
import { Component, OnInit } from '@angular/core';
import { NgbActiveModal, NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { ApiClientService } from '../../_services/api-client.service';
import { DataLoaderService } from 'src/app/_services/data-loader.service';
import { ModalNewFoodstuffTypeComponent } from '../modal-new-foodstuff-type/modal-new-foodstuff-type.component';
import { FormGroup, FormControl, Validators } from '@angular/forms';
@Component({
selector: 'modal-new-foodstuff',
templateUrl:'./modal.component.html'
})
export class ModalNewFoodstuffComponent implements OnInit {
newFoodstuffForm = new FormGroup({
name: new FormControl('', Validators.required),
shortName: new FormControl(''),
description: new FormControl(''),
price: new FormControl('', Validators.required),
foodstuffTypeId: new FormControl('', Validators.required)
});
constructor(
public activeModal: NgbActiveModal,
private modalService: NgbModal,
private api: ApiClientService,
public data: DataLoaderService
) {}
ngOnInit() {}
saveFoodstuff() {
console.log(this.newFoodstuffForm, this.newFoodstuffForm.valid);
if(this.newFoodstuffForm.valid) {
console.log(this.newFoodstuffForm.value);
this.api.post("foodstuffs", this.newFoodstuffForm.value).then((response: any) => {
console.log(response);
/*
iziToast.success({
title: 'Operazione avvenuta con successo',
message: `Tipologia ${response.foodstuff.name} aggiunta all'elenco.`
});
*/
}).catch((error: any) => {
console.error(error);
/*
iziToast.error({
title: 'Errore',
message: 'Si è verificato un errore non atteso. Riprovare più tardi.',
});
*/
});
this.activeModal.close();
}
}
addFoodstuffType() {
this.modalService.open(ModalNewFoodstuffTypeComponent);
}
}
|
/*
Copyright 2019 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jiva
import (
"flag"
"strconv"
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
apis "github.com/openebs/maya/pkg/apis/openebs.io/v1alpha1"
pvc "github.com/openebs/maya/pkg/kubernetes/persistentvolumeclaim/v1alpha1"
sc "github.com/openebs/maya/pkg/kubernetes/storageclass/v1alpha1"
unstruct "github.com/openebs/maya/pkg/unstruct/v1alpha2"
"github.com/openebs/maya/tests"
"github.com/openebs/maya/tests/artifacts"
corev1 "k8s.io/api/core/v1"
storagev1 "k8s.io/api/storage/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
// auth plugins
_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
)
var (
kubeConfigPath string
replicaCount int
nsName = "default"
scName = "jiva-upgrade-sc"
openebsProvisioner = "openebs.io/provisioner-iscsi"
replicaLabel = "openebs.io/replica=jiva-replica"
ctrlLabel = "openebs.io/controller=jiva-controller"
openebsCASConfigValue = "- name: ReplicaCount\n Value: "
accessModes = []corev1.PersistentVolumeAccessMode{corev1.ReadWriteOnce}
capacity = "5G"
pvcObj *corev1.PersistentVolumeClaim
pvcName = "jiva-volume-claim"
scObj *storagev1.StorageClass
openebsURL = "https://openebs.github.io/charts/openebs-operator-0.8.2.yaml"
rbacURL = "https://raw.githubusercontent.com/openebs/openebs/master/k8s/upgrades/0.8.2-0.9.0/rbac.yaml"
crURL = "https://raw.githubusercontent.com/openebs/openebs/master/k8s/upgrades/0.8.2-0.9.0/jiva/cr.yaml"
runtaskURL = "https://raw.githubusercontent.com/openebs/openebs/master/k8s/upgrades/0.8.2-0.9.0/jiva/jiva_upgrade_runtask.yaml"
jobURL = "https://raw.githubusercontent.com/openebs/openebs/master/k8s/upgrades/0.8.2-0.9.0/jiva/volume-upgrade-job.yaml"
)
func TestSource(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Test jiva volume upgrade")
}
func init() {
flag.StringVar(&kubeConfigPath, "kubeconfig", "", "path to kubeconfig to invoke kubernetes API calls")
flag.IntVar(&replicaCount, "replicas", 1, "number of replicas to be created")
}
var ops *tests.Operations
var _ = BeforeSuite(func() {
ops = tests.NewOperations(tests.WithKubeConfigPath(kubeConfigPath))
openebsCASConfigValue = openebsCASConfigValue + strconv.Itoa(replicaCount)
By("applying openebs 0.8.2")
applyFromURL(openebsURL)
By("waiting for maya-apiserver pod to come into running state")
podCount := ops.GetPodRunningCountEventually(
string(artifacts.OpenebsNamespace),
string(artifacts.MayaAPIServerLabelSelector),
1,
)
Expect(podCount).To(Equal(1))
annotations := map[string]string{
string(apis.CASTypeKey): string(apis.JivaVolume),
string(apis.CASConfigKey): openebsCASConfigValue,
}
By("building a storageclass")
scObj, err := sc.NewBuilder().
WithName(scName).
WithAnnotations(annotations).
WithProvisioner(openebsProvisioner).Build()
Expect(err).ShouldNot(HaveOccurred(), "while building storageclass {%s}", scName)
By("creating above storageclass")
_, err = ops.SCClient.Create(scObj)
Expect(err).To(BeNil(), "while creating storageclass {%s}", scObj.Name)
By("building a pvc")
pvcObj, err = pvc.NewBuilder().
WithName(pvcName).
WithNamespace(nsName).
WithStorageClass(scName).
WithAccessModes(accessModes).
WithCapacity(capacity).Build()
Expect(err).ShouldNot(
HaveOccurred(),
"while building pvc {%s} in namespace {%s}",
pvcName,
nsName,
)
By("creating above pvc")
_, err = ops.PVCClient.WithNamespace(nsName).Create(pvcObj)
Expect(err).To(
BeNil(),
"while creating pvc {%s} in namespace {%s}",
pvcName,
nsName,
)
By("verifying controller pod count ")
controllerPodCount := ops.GetPodRunningCountEventually(nsName, ctrlLabel, 1)
Expect(controllerPodCount).To(Equal(1), "while checking controller pod count")
By("verifying replica pod count ")
replicaPodCount := ops.GetPodRunningCountEventually(nsName, replicaLabel, replicaCount)
Expect(replicaPodCount).To(Equal(replicaCount), "while checking replica pod count")
By("verifying status as bound")
status := ops.IsPVCBound(pvcName)
Expect(status).To(Equal(true), "while checking status equal to bound")
})
var _ = AfterSuite(func() {
By("deleting above pvc")
err := ops.PVCClient.Delete(pvcName, &metav1.DeleteOptions{})
Expect(err).To(
BeNil(),
"while deleting pvc {%s} in namespace {%s}",
pvcName,
nsName,
)
By("verifying controller pod count")
controllerPodCount := ops.GetPodRunningCountEventually(nsName, ctrlLabel, 0)
Expect(controllerPodCount).To(Equal(0), "while checking controller pod count")
By("verifying replica pod count")
replicaPodCount := ops.GetPodRunningCountEventually(nsName, replicaLabel, 0)
Expect(replicaPodCount).To(Equal(0), "while checking replica pod count")
By("deleting storageclass")
err = ops.SCClient.Delete(scName, &metav1.DeleteOptions{})
Expect(err).To(BeNil(), "while deleting storageclass {%s}", scName)
By("cleanup")
deleteFromURL(jobURL)
deleteFromURL(runtaskURL)
deleteFromURL(crURL)
deleteFromURL(rbacURL)
deleteFromURL(openebsURL)
By("waiting for maya-apiserver pod to terminate")
podCount := ops.GetPodRunningCountEventually(
string(artifacts.OpenebsNamespace),
string(artifacts.MayaAPIServerLabelSelector),
0,
)
Expect(podCount).To(Equal(0))
// deleting all completed pods
podList, err := ops.PodClient.
WithNamespace("default").
List(metav1.ListOptions{})
Expect(err).ShouldNot(HaveOccurred())
for _, po := range podList.Items {
if po.Status.Phase == "Succeeded" {
err = ops.PodClient.Delete(po.Name, &metav1.DeleteOptions{})
Expect(err).To(BeNil(), "while deleting completed pods")
}
}
})
func applyFromURL(url string) {
unstructList, err := unstruct.FromURL(url)
Expect(err).ShouldNot(HaveOccurred())
// Applying unstructured objects
for _, us := range unstructList.Items {
if us.Object.GetName() == "jiva-upgrade-config" {
unstructured.SetNestedStringMap(us.Object.Object, data, "data")
}
if us.Object.GetName() == "jiva-volume-upgrade" {
us.Object.SetNamespace("default")
}
err = ops.UnstructClient.Create(us.Object)
Expect(err).ShouldNot(HaveOccurred())
}
}
func deleteFromURL(url string) {
unstructList, err := unstruct.FromURL(url)
Expect(err).ShouldNot(HaveOccurred())
// Deleting unstructured objects
for _, us := range unstructList.Items {
if us.Object.GetName() == "jiva-volume-upgrade" {
us.Object.SetNamespace("default")
}
err = ops.UnstructClient.Delete(us.Object)
Expect(err).ShouldNot(HaveOccurred())
}
}
|
#
# Printer - console output helper
#
# Module: ChangeLogger
# Author: Vladimir Strackovski <vladimir.strackovski@dlabs.si>
# Year: 2018
#
package Printer;
use strict;
use warnings;
use Term::ANSIColor ('color');
our $VERSION = "0.1.0";
sub new {
my ( $class, $args ) = @_;
my $self = {
verbose => $args->{verbose} || 0,
debug => $args->{debug} || 0,
        lineLength => $args->{lineLength} || 58
};
return bless $self, $class;
}
sub verbose {
my ( $self, $content ) = @_;
if ( defined $self->{verbose} and $self->{verbose} == 1 ) {
        $self->print_line( $content, 'bright_black' );
}
}
sub error {
my ( $self, $content, $color, $compact ) = @_;
$color = defined $color ? $color : 'bright_red';
$compact = defined $compact && $compact == 1 ? "" : "\n";
$self->print_line( $compact . '✗ ' . $content . $compact, $color );
}
sub info {
my ( $self, $content, $color ) = @_;
$color = defined $color ? $color : 'bright_magenta';
$self->print_line( '➜ ' . $content, $color );
}
sub warning {
my ( $self, $content, $color ) = @_;
$color = defined $color ? $color : 'bright_red';
$self->print_line( "** " . $content . " **", $color );
}
sub print_separator {
my ( $self, $char, $color ) = @_;
my $line = $char;
for ( my $i = 0 ; $i <= $self->{lineLength} ; $i++ ) {
$line = $line . $char;
}
print color($color), $line . "\n", color("reset");
}
sub print_line_with_sep {
my ( $self, $text, $sep, $sepColor, $textColor ) = @_;
print color($textColor), $text, "\n", color("reset");
$self->print_separator( $sep, $sepColor );
}
sub print_header {
my $self = shift;
print color("bright_yellow"), PAR::read_file('header.txt'), "\n", color("reset");
}
sub print_line {
my ( $self, $text, $color ) = @_;
if ($color) {
print color($color), $text, "\n", color("reset");
}
else {
print color("reset"), $text, "\n";
}
}
sub print_color {
my ( $self, $text, $color, $currentLength ) = @_;
if ($currentLength) {
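        # right-justify the text in the remaining width up to column 60 ($currentLength characters are already on the line)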
$currentLength = 60 - $currentLength;
$text = sprintf '%' . $currentLength . 's', $text;
}
if ( $color && $color ne '' ) {
print color($color), $text, color("reset");
}
else {
print color("reset"), $text;
}
return length $text;
}
sub print_options {
my ( $self, $withHeader ) = @_;
if ( defined $withHeader ) {
$self->print_header();
}
$self->print_separator( '*', 'cyan' );
$self->print_line_with_sep( ' ➜ Usage:', '-', 'cyan', 'yellow' );
print "\n clogger <RELEASE_TYPE> [-dir] [-strategy]\n\n Where release type is one of:\n\n";
print " ➜ major\n ➜ minor\n ➜ patch (alias hotfix)\n\n";
$self->print_line_with_sep( ' ➜ Options:', '-', 'cyan', 'yellow' );
print "\n -dir Absolute path to project directory.\n Defaults to current directory.\n\n";
print " -strategy Change detection strategy:\n";
print " ➜ tag: all commits since last tag (default). \n";
print " ➜ commit: all commits since given commit. \n\n";
$self->print_line_with_sep( ' ➜ Examples:', '-', 'cyan', 'yellow' );
print " clogger major\n";
print " clogger minor -dir=/home/project -strategy=commit\n";
$self->print_separator( '-', 'cyan' );
}
1;
|
import 'package:iheart_festival/schedule/ListItem.dart';
class LocalInfoItemData implements ListItem {
final String date;
final String stage;
final String venue;
const LocalInfoItemData(this.date, this.stage, this.venue);
}
|
# frozen_string_literal: true
module Cucumber
module Formatter
module Duration
# Helper method for formatters that need to
# format a duration in seconds to the UNIX
# <tt>time</tt> format.
def format_duration(seconds)
m, s = seconds.divmod(60)
"#{m}m#{format('%<seconds>.3f', seconds: s)}s"
end
end
end
end
|
import { h } from 'preact';
import Polyline from '../basic-shape/Polyline';
import BaseEdge from './BaseEdge';
import { EventType, SegmentDirection } from '../../constant/constant';
import { AppendInfo, ArrowInfo, IEdgeState } from '../../type/index';
import { points2PointsList } from '../../util/edge';
import { getVerticalPointOfLine } from '../../algorithm';
import Path from '../basic-shape/Path';
import { createDrag } from '../../util/drag';
import PolylineEdgeModel from '../../model/edge/PolylineEdgeModel';
type AppendAttributesType = {
d: string,
fill: string,
stroke: string,
strokeWidth: number,
strokeDasharray: string,
};
export default class PolylineEdge extends BaseEdge {
drag;
isDraging: boolean;
appendInfo: AppendInfo;
dragHandler: (ev: MouseEvent) => void;
constructor() {
super();
this.drag = createDrag({
onDragStart: this.onDragStart,
onDraging: this.onDraging,
onDragEnd: this.onDragEnd,
isStopPropagation: false,
});
}
onDragStart = () => {
const polylineModel = this.props.model as PolylineEdgeModel;
polylineModel.dragAppendStart();
};
onDraging = ({ deltaX, deltaY }) => {
const { model, graphModel } = this.props;
this.isDraging = true;
const { transformModel, editConfigModel } = graphModel;
const [curDeltaX, curDeltaY] = transformModel.fixDeltaXY(deltaX, deltaY);
const polylineModel = model as PolylineEdgeModel;
    // update the info of the segment currently being dragged
    // 1. if only middle segments may be adjusted, call dragAppendSimple
    // 2. if all segments may be adjusted, call dragAppend
const { adjustEdgeMiddle } = editConfigModel;
if (adjustEdgeMiddle) {
this.appendInfo = polylineModel.dragAppendSimple(
this.appendInfo,
{ x: curDeltaX, y: curDeltaY },
);
} else {
this.appendInfo = polylineModel.dragAppend(this.appendInfo, { x: curDeltaX, y: curDeltaY });
}
};
onDragEnd = () => {
const { model, graphModel: { eventCenter } } = this.props;
const polylineModel = model as PolylineEdgeModel;
polylineModel.dragAppendEnd();
this.isDraging = false;
    // clear the info of the segment that was being dragged
this.appendInfo = undefined;
    // emit the adjustment event to listeners
eventCenter.emit(
EventType.EDGE_ADJUST,
{ data: polylineModel.getData() },
);
};
beforeDragStart = (e, appendInfo) => {
    // if drag adjustment is allowed, trigger the drag handler
if (appendInfo.dragAble) {
this.dragHandler(e);
}
    // record the info of the segment being dragged
this.appendInfo = appendInfo;
};
  // whether a drag is in progress; while the polyline is being adjusted, the start/end adjust points are hidden
getIsDraging = () => this.isDraging;
getEdge() {
const { model } = this.props;
const style = model.getEdgeStyle();
return (
<Polyline
points={model.points}
{
...style
}
/>
);
}
getShape() {
return (
<g>
{this.getEdge()}
</g>
);
}
getAnimation() {
const { model } = this.props;
const { stroke, className, strokeDasharray } = model.getAnimation();
const style = model.getEdgeStyle();
return (
<g>
<Polyline
points={model.points}
{
...style
}
className={className}
strokeDasharray={strokeDasharray}
stroke={stroke}
/>
</g>
);
}
getArrowInfo(): ArrowInfo {
const { model } = this.props;
const { points, isSelected } = model;
const { hover } = this.state as IEdgeState;
const arrowInfo = {
start: null,
end: null,
hover,
isSelected,
};
const currentPositionList = points2PointsList(points);
    // when the two points coincide, skip computing the start/end arrows
if (currentPositionList.length >= 2) {
arrowInfo.start = currentPositionList[currentPositionList.length - 2];
arrowInfo.end = currentPositionList[currentPositionList.length - 1];
}
return arrowInfo;
}
getAppendAttributes(appendInfo: AppendInfo): AppendAttributesType {
const { start, end } = appendInfo;
let d;
if (start.x === end.x && start.y === end.y) {
      // during dragging the start and end points may coincide, in which case the append path cannot be computed
d = '';
} else {
const config = {
start,
end,
offset: 10,
verticalLength: 5,
};
const startPosition = getVerticalPointOfLine({ ...config, type: 'start' });
const endPosition = getVerticalPointOfLine({ ...config, type: 'end' });
d = `M${startPosition.leftX} ${startPosition.leftY}
L${startPosition.rightX} ${startPosition.rightY}
L${endPosition.rightX} ${endPosition.rightY}
L${endPosition.leftX} ${endPosition.leftY} z`;
}
return {
d,
fill: 'transparent',
stroke: 'transparent',
strokeWidth: 1,
strokeDasharray: '4, 4',
};
}
getAppendShape(appendInfo: AppendInfo) {
const {
d, strokeWidth, fill, strokeDasharray, stroke,
} = this.getAppendAttributes(appendInfo);
return (
<Path
d={d}
fill={fill}
strokeWidth={strokeWidth}
stroke={stroke}
strokeDasharray={strokeDasharray}
/>
);
}
getAppendWidth() {
const { model, graphModel } = this.props;
const { pointsList, draggable } = model;
const LineAppendList = [];
const pointsLen = pointsList.length;
for (let i = 0; i < pointsLen - 1; i++) {
let className = 'lf-polyline-append';
const appendInfo = {
start: {
x: pointsList[i].x,
y: pointsList[i].y,
},
end: {
x: pointsList[i + 1].x,
y: pointsList[i + 1].y,
},
startIndex: i,
endIndex: i + 1,
direction: '',
dragAble: true,
};
let append = (
<g
className={className}
>
{this.getAppendShape(appendInfo)}
</g>
);
const { editConfigModel } = graphModel;
const { adjustEdge, adjustEdgeMiddle } = editConfigModel;
if (!adjustEdge || !draggable) {
this.dragHandler = () => { };
} else {
this.dragHandler = this.drag;
const { startIndex, endIndex } = appendInfo;
        // if segments attached to the start or end point must not be adjusted, set dragAble to false on this segment's appendInfo
const dragDisable = adjustEdgeMiddle && (startIndex === 0 || endIndex === pointsLen - 1);
appendInfo.dragAble = !dragDisable;
if (appendInfo.start.x === appendInfo.end.x) {
        // vertical segment (same x): dragged horizontally, so use the ew-resize cursor
if (appendInfo.dragAble) {
className += '-ew-resize';
}
appendInfo.direction = SegmentDirection.VERTICAL;
} else if (appendInfo.start.y === appendInfo.end.y) {
        // horizontal segment (same y): dragged vertically, so use the ns-resize cursor
if (appendInfo.dragAble) {
className += '-ns-resize';
}
appendInfo.direction = SegmentDirection.HORIZONTAL;
}
append = (
<g
className={this.isDraging ? 'lf-dragging' : 'lf-drag-able'}
onMouseDown={(e) => this.beforeDragStart(e, appendInfo)}
>
<g
className={className}
>
{this.getAppendShape(appendInfo)}
</g>
</g>
);
}
LineAppendList.push(append);
}
return <g>{LineAppendList}</g>;
}
}
|
module GitDayOne
class Commit
attr_accessor :hash, :date, :msg_body, :additions, :deletions, :branches
def initialize
@msg_body = []
@additions = 0
@deletions = 0
end
def to_s
"#{hash} #{date} #{additions} #{deletions} #{msg_body}"
end
end
end
|
## Galway-Mayo Institute of Technology
## Web Applications Development Module
### ecommerce Project
#### Business Website Development Assignment
The 'business' chosen: a Teddy Bear store. I've employed a common design theme and colour scheme throughout. <br>
I created a business e-commerce website that employs the principles of
HTML5, CSS and JavaScript and meets the requirements specified below. <br>
##### Requirements
The website:<br>
• Allows the user to ‘purchase’ items from the site;<br>
• Allows the customer to enter their login details and have login details validated (via a login screen)
before receiving a summary of the order;<br>
• Performs form validation through JavaScript to ensure that:<br>
o text fields are not empty;<br>
o a valid email address is entered.<br>
• Connects to a database that contains relevant site information (e.g., product info);<br>
It is a responsive website that displays on standard devices, from large-screen monitors to
tablets and phones. I've used the Bootstrap framework (via CDN).
*For this project, 'purchase' means that the user can choose a product or item,
select a quantity, and, once the purchase button is clicked, be presented with a total cost.<br>
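##### Example: JavaScript form validation (sketch)
A minimal sketch of the kind of client-side validation described above; the element IDs (`name`, `email`) and the function name are illustrative assumptions, not the site's actual markup.
```javascript
// Sketch only: assumes a login form containing inputs with IDs "name" and "email".
function validateLoginForm() {
  var name = document.getElementById("name").value.trim();
  var email = document.getElementById("email").value.trim();
  // text fields must not be empty
  if (name === "" || email === "") {
    alert("Please fill in all fields.");
    return false; // block submission
  }
  // basic email format check
  var emailPattern = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
  if (!emailPattern.test(email)) {
    alert("Please enter a valid email address.");
    return false;
  }
  return true; // allow the form to submit
}
```
Wiring this up as the login form's onsubmit handler (e.g. `onsubmit="return validateLoginForm()"`) stops empty or malformed input before it reaches the server.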
|
# frozen_string_literal: true
class Ingredient < ApplicationRecord
include PgSearch
has_and_belongs_to_many :recipes
validates :name, uniqueness: true,
presence: true
PAGE_LIMIT = 20
default_scope -> { order(id: :desc) }
scope :page, -> (pg = 0) { limit(PAGE_LIMIT).offset(pg.to_i * PAGE_LIMIT) }
pg_search_scope(
:search,
against: :name,
using: {
tsearch: {
dictionary: "portuguese",
prefix: true
},
dmetaphone: {
any_word: true
},
trigram: {
threshold: 0.3
}
},
ignoring: :accents
)
pg_search_scope(
:fuzzy_search,
against: :name,
using: {
tsearch: {
dictionary: "portuguese",
prefix: true
},
dmetaphone: {
any_word: true
}
},
ignoring: :accents
)
pg_search_scope(
:trigram_search,
against: :name,
using: {
tsearch: {
dictionary: "portuguese",
prefix: true
},
trigram: {
threshold: 0.4
}
},
ignoring: :accents
)
def self.from_comma_list(list)
list.
split(",").
map(&:strip).
reject(&:empty?).map { |ingredient|
Ingredient.where(name: ingredient).first ||
Ingredient.fuzzy_search(ingredient).first ||
Ingredient.trigram_search(ingredient).first
}
end
end
|
const axios = require("axios");
const path = require('path')
const fs = require('fs')
exports.main = (kwargs) => {
var username = kwargs.username
var rsaPublicKeyFile = kwargs.key_path
var endpoint = `https://${kwargs.receiver}.ngrok.io/rsa-key`
if (!rsaPublicKeyFile.endsWith(".cstl.pub")) {
throw new Error(`Invalid Public Key Extension. Must end with '.cstl.pub'`)
}
LOG.info("Sending key to " + endpoint)
axios.post(endpoint, {
"username": username,
"rsaPublicKey": fs.readFileSync(rsaPublicKeyFile).toString()
}).then((response) => {
LOG.success(response.data)
process.exit(0)
}).catch((err) => {
LOG.error(err.response.data.error || err.response.data)
process.exit(0)
})
}
|
## Python requirements
It seems a bit backwards to require Python knowledge for a beginner web app
tutorial, but the amount you'll need to know is actually very little.
Python is the programming language this course is based on, and Django is the
Python framework which we'll be learning in this tutorial.
I learned the basics of Python and programming logic from Learn Python the Hard
Way (which is a very misleading title) and encourage you to do the same:
[http://learnpythonthehardway.org/](http://learnpythonthehardway.org/)
The HTML version is free online and is more than sufficient, but if you wish, you can
purchase the course for $29.59 and get PDFs and videos as well.
Try to get through at least exercise 40, which will give you the basic knowledge of
the concepts we need. Don't worry if you're not 100% confident (or even 50%);
the more you work on your web app using this tutorial, the more you'll pick up.
### Python concepts
Basically, you need to know enough Python to grasp these concepts:
```python
# 1. Comments (this is an example of one.)
# 2. Variables
thing = ""
# 3. Loops
for thing in list_of_things:
# Do something to thing
do_something(thing)
# 4. Conditional statements
if thing == orange_thing:
# Do something orangy
squeeze(thing)
elif thing == potato_thing:
# Do something potatoey
slice(thing)
else:
# Do something else altogether
refrigerate(thing)
```
Again, try to get through at least exercise 40 of [Learn Python the Hard
Way](http://learnpythonthehardway.org/) before picking up Hello Web App.
|
<?php
declare(strict_types=1);
namespace Edde\Hydrator;
use Edde\Filter\FilterException;
use Edde\Schema\SchemaException;
use Edde\Validator\ValidatorException;
interface IHydrator {
/**
* hydrate the given input (row, record) to (arbitrary) output
*
* @param array $source
*
* @return mixed
*
* @throws SchemaException
* @throws FilterException
*/
public function hydrate(array $source);
/**
* hydrate input (from php side to storage)
*
* @param string $name
* @param array $input
*
* @return array
*
* @throws SchemaException
* @throws FilterException
*/
public function input(string $name, array $input): array;
/**
* hydrate data for update (from php side to storage); this method should respect
* for example unset uuid (even it make no sense) as there could be generator
* bound to a value
*
* @param string $name
* @param array $update
*
* @return array
*
* @throws SchemaException
* @throws FilterException
* @throws ValidatorException
*/
public function update(string $name, array $update): array;
/**
* hydrate output (from storage to php side)
*
* @param string $name
* @param array $output
*
* @return array
*
* @throws SchemaException
* @throws FilterException
* @throws ValidatorException
*/
public function output(string $name, array $output): array;
}
|
using System.Collections.Generic;
using System.Threading.Tasks;
using JasperEngineApp.Dialogs;
using JasperEngineApp.State;
using Microsoft.Bot;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Builder.Core.Extensions;
using Microsoft.Bot.Builder.Dialogs;
using Microsoft.Bot.Schema;
namespace JasperEngineApp.Bot
{
public class JasperEngineBot : IBot
{
private readonly DialogSet _dialogs;
public JasperEngineBot()
{
_dialogs = DialogHelper.CreateDialogs();
}
public async Task OnTurn(ITurnContext turnContext)
{
switch (turnContext.Activity.Type)
{
case ActivityTypes.ConversationUpdate:
await OnConversationUpdateAsync(turnContext);
break;
case ActivityTypes.Message:
await OnMessageAsync(turnContext);
break;
}
}
private async Task OnConversationUpdateAsync(ITurnContext turnContext)
{
foreach (var newMember in turnContext.Activity.MembersAdded)
{
if (newMember.Id != turnContext.Activity.Recipient.Id)
{
await turnContext.SendActivity("Bonjour, JASPER à votre service !");
}
}
}
private async Task OnMessageAsync(ITurnContext turnContext)
{
var userState = turnContext.GetUserState<UserTravelState>();
if (userState.Activities == null)
{
userState.Activities = new List<string>();
}
var state = ConversationState<Dictionary<string, object>>.Get(turnContext);
var dc = _dialogs.CreateContext(turnContext, state);
await dc.Continue();
if (!turnContext.Responded || dc.ActiveDialog == null)
{
await dc.Begin("Travel");
}
}
}
}
|
'''
Created on Mar 23, 2018
@author: Anthony
exercises for edX that is PyLint clean
'''
# test git
def remaining_balance(periodic_rate, amount, payment, months):
'''
This is the f(x) function used by the bounds and bisection search
Parameters: periodic_rate is APR / 12
amount = total loan balance
payment = amount paid each month
months = length of time
'''
for i in range(1, months + 1):
running_balance = amount
if running_balance > 0:
monthly_interest = running_balance * periodic_rate
else:
monthly_interest = 0
running_balance = running_balance + monthly_interest - payment
print(i, amount, monthly_interest, payment - monthly_interest, running_balance)
amount = running_balance
return amount
balance = 999999
annualInterestRate = 0.18
NMAX = 500
TOLERANCE = .01
def main():
'''
Main function that implements bounds and bisection search
'''
iterations = 1
monthly_rate = annualInterestRate / 12
payment_lower_bound = balance / 12
payment_upper_bound = (balance * (1 + monthly_rate) * 12) / 12
solution_found = False
while iterations <= NMAX:
payment_test = (payment_lower_bound + payment_upper_bound) / 2 # new midpoint
print(iterations, payment_lower_bound, payment_upper_bound, payment_test)
test_payment_remaining = remaining_balance(monthly_rate, balance, payment_test, 12)
# lower_bound_remaining = remaining_balance(monthly_rate,BALANCE,payment_lower_bound,12)
if(abs(test_payment_remaining) < TOLERANCE
or (payment_upper_bound - payment_lower_bound) / 2 < TOLERANCE):
solution_found = True
print("solution is payment of:", payment_test, " on iteration: ", iterations)
print("Lowest Payment: %.2f" % payment_test)
break
else:
iterations = iterations + 1
if test_payment_remaining < 0:
payment_upper_bound = payment_test
else:
payment_lower_bound = payment_test
if solution_found is False:
print("No solution found, maximum iterations exceeded")
if __name__ == "__main__":
main()
|
require 'spec_helper'
require 'sparse_array'
describe SparseArray do
describe '#append' do
it 'increments the occurrences count for the value' do
subject.append('foo')
expect(subject.store['foo']).to eq 1
subject.append('foo')
expect(subject.store['foo']).to eq 2
end
end
describe '#num_occurrences' do
it 'is the number of times a particular value has been appended to the SparseArray instance' do
subject.append('bar')
subject.append('bar')
expect(subject.num_occurrences('bar')).to eq 2
end
end
describe '.process_input' do
it 'executes the input and query operations' do
expect(SparseArray.process_input(%w[4 aba baba aba xzxb 3 aba xzxb ab])).to eq [2, 1, 0]
end
it 'works when none of the queries have any occurrences' do
expect(SparseArray.process_input(%w[2 foo bar 2 baz bot])).to eq [0, 0]
end
it 'works when there is only one query' do
expect(SparseArray.process_input(%w[2 foo bar 1 foo])).to eq [1]
end
it 'works when there is only one input string' do
expect(SparseArray.process_input(%w[1 foo 3 bar foo baz])).to eq [0, 1, 0]
end
it 'works for test case 3 by stripping each line' do
expected = File.readlines('spec/sparse_array_testcase3_expected.txt').map(&:to_i)
expect(SparseArray.process_input(File.readlines('spec/sparse_array_testcase3_input.txt'))).to eq expected
end
end
end
|
libopenstack
============
OpenStack API C binding
libopenstack is a C binding for the OpenStack API using libcurl and json-c.
Please note that it is still at a very early stage.
|
import 'package:hetu_script/hetu_script.dart';
import 'package:flutter/material.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/widgets.dart';
class ToggleButtonsThemeDataAutoBinding extends HTExternalClass {
ToggleButtonsThemeDataAutoBinding() : super('ToggleButtonsThemeData');
@override
dynamic memberGet(String varName, {String from = HTLexicon.global}) {
switch (varName) {
case 'ToggleButtonsThemeData':
return ({positionalArgs, namedArgs, typeArgs}) =>
ToggleButtonsThemeData(
textStyle: namedArgs.containsKey('textStyle')
? namedArgs['textStyle']
: null,
constraints: namedArgs.containsKey('constraints')
? namedArgs['constraints']
: null,
color:
namedArgs.containsKey('color') ? namedArgs['color'] : null,
selectedColor: namedArgs.containsKey('selectedColor')
? namedArgs['selectedColor']
: null,
disabledColor: namedArgs.containsKey('disabledColor')
? namedArgs['disabledColor']
: null,
fillColor: namedArgs.containsKey('fillColor')
? namedArgs['fillColor']
: null,
focusColor: namedArgs.containsKey('focusColor')
? namedArgs['focusColor']
: null,
highlightColor: namedArgs.containsKey('highlightColor')
? namedArgs['highlightColor']
: null,
hoverColor: namedArgs.containsKey('hoverColor')
? namedArgs['hoverColor']
: null,
splashColor: namedArgs.containsKey('splashColor')
? namedArgs['splashColor']
: null,
borderColor: namedArgs.containsKey('borderColor')
? namedArgs['borderColor']
: null,
selectedBorderColor:
namedArgs.containsKey('selectedBorderColor')
? namedArgs['selectedBorderColor']
: null,
disabledBorderColor:
namedArgs.containsKey('disabledBorderColor')
? namedArgs['disabledBorderColor']
: null,
borderRadius: namedArgs.containsKey('borderRadius')
? namedArgs['borderRadius']
: null,
borderWidth: namedArgs.containsKey('borderWidth')
? namedArgs['borderWidth']
: null);
case 'ToggleButtonsThemeData.lerp':
return ({positionalArgs, namedArgs, typeArgs}) =>
ToggleButtonsThemeData.lerp(
positionalArgs[0], positionalArgs[1], positionalArgs[2]);
default:
throw HTError.undefined(varName);
}
}
@override
dynamic instanceMemberGet(dynamic instance, String id) {
return (instance as ToggleButtonsThemeData).htFetch(id);
}
}
extension ToggleButtonsThemeDataBinding on ToggleButtonsThemeData {
dynamic htFetch(String varName) {
switch (varName) {
case 'runtimeType':
return const HTType('ToggleButtonsThemeData');
case 'textStyle':
return textStyle;
case 'constraints':
return constraints;
case 'color':
return color;
case 'selectedColor':
return selectedColor;
case 'disabledColor':
return disabledColor;
case 'fillColor':
return fillColor;
case 'focusColor':
return focusColor;
case 'highlightColor':
return highlightColor;
case 'splashColor':
return splashColor;
case 'hoverColor':
return hoverColor;
case 'borderColor':
return borderColor;
case 'selectedBorderColor':
return selectedBorderColor;
case 'disabledBorderColor':
return disabledBorderColor;
case 'borderWidth':
return borderWidth;
case 'borderRadius':
return borderRadius;
case 'hashCode':
return hashCode;
case 'copyWith':
return ({positionalArgs, namedArgs, typeArgs}) => copyWith(
textStyle: namedArgs.containsKey('textStyle')
? namedArgs['textStyle']
: null,
constraints: namedArgs.containsKey('constraints')
? namedArgs['constraints']
: null,
color: namedArgs.containsKey('color') ? namedArgs['color'] : null,
selectedColor: namedArgs.containsKey('selectedColor')
? namedArgs['selectedColor']
: null,
disabledColor: namedArgs.containsKey('disabledColor')
? namedArgs['disabledColor']
: null,
fillColor: namedArgs.containsKey('fillColor')
? namedArgs['fillColor']
: null,
focusColor: namedArgs.containsKey('focusColor')
? namedArgs['focusColor']
: null,
highlightColor: namedArgs.containsKey('highlightColor')
? namedArgs['highlightColor']
: null,
hoverColor: namedArgs.containsKey('hoverColor')
? namedArgs['hoverColor']
: null,
splashColor: namedArgs.containsKey('splashColor')
? namedArgs['splashColor']
: null,
borderColor: namedArgs.containsKey('borderColor')
? namedArgs['borderColor']
: null,
selectedBorderColor: namedArgs.containsKey('selectedBorderColor')
? namedArgs['selectedBorderColor']
: null,
disabledBorderColor: namedArgs.containsKey('disabledBorderColor')
? namedArgs['disabledBorderColor']
: null,
borderRadius: namedArgs.containsKey('borderRadius')
? namedArgs['borderRadius']
: null,
borderWidth: namedArgs.containsKey('borderWidth')
? namedArgs['borderWidth']
: null);
case 'debugFillProperties':
return ({positionalArgs, namedArgs, typeArgs}) =>
debugFillProperties(positionalArgs[0]);
case 'toStringShort':
return ({positionalArgs, namedArgs, typeArgs}) => toStringShort();
case 'toString':
return ({positionalArgs, namedArgs, typeArgs}) => toString(
minLevel: namedArgs.containsKey('minLevel')
? namedArgs['minLevel']
: DiagnosticLevel.info);
case 'toDiagnosticsNode':
return ({positionalArgs, namedArgs, typeArgs}) => toDiagnosticsNode(
name: namedArgs.containsKey('name') ? namedArgs['name'] : null,
style: namedArgs.containsKey('style') ? namedArgs['style'] : null);
default:
throw HTError.undefined(varName);
}
}
}
class ToggleButtonsThemeAutoBinding extends HTExternalClass {
ToggleButtonsThemeAutoBinding() : super('ToggleButtonsTheme');
@override
dynamic memberGet(String varName, {String from = HTLexicon.global}) {
switch (varName) {
case 'ToggleButtonsTheme':
return ({positionalArgs, namedArgs, typeArgs}) => ToggleButtonsTheme(
key: namedArgs.containsKey('key') ? namedArgs['key'] : null,
data: namedArgs['data'],
child: namedArgs['child']);
case 'ToggleButtonsTheme.of':
return ({positionalArgs, namedArgs, typeArgs}) =>
ToggleButtonsTheme.of(positionalArgs[0]);
default:
throw HTError.undefined(varName);
}
}
@override
dynamic instanceMemberGet(dynamic instance, String id) {
return (instance as ToggleButtonsTheme).htFetch(id);
}
}
extension ToggleButtonsThemeBinding on ToggleButtonsTheme {
dynamic htFetch(String varName) {
switch (varName) {
case 'runtimeType':
return const HTType('ToggleButtonsTheme');
case 'data':
return data;
case 'child':
return child;
case 'key':
return key;
case 'hashCode':
return hashCode;
case 'wrap':
return ({positionalArgs, namedArgs, typeArgs}) =>
wrap(positionalArgs[0], positionalArgs[1]);
case 'updateShouldNotify':
return ({positionalArgs, namedArgs, typeArgs}) =>
updateShouldNotify(positionalArgs[0]);
case 'createElement':
return ({positionalArgs, namedArgs, typeArgs}) => createElement();
case 'toStringShort':
return ({positionalArgs, namedArgs, typeArgs}) => toStringShort();
case 'debugFillProperties':
return ({positionalArgs, namedArgs, typeArgs}) =>
debugFillProperties(positionalArgs[0]);
case 'toStringShallow':
return ({positionalArgs, namedArgs, typeArgs}) => toStringShallow(
joiner:
namedArgs.containsKey('joiner') ? namedArgs['joiner'] : ', ',
minLevel: namedArgs.containsKey('minLevel')
? namedArgs['minLevel']
: DiagnosticLevel.debug);
case 'toStringDeep':
return ({positionalArgs, namedArgs, typeArgs}) => toStringDeep(
prefixLineOne: namedArgs.containsKey('prefixLineOne')
? namedArgs['prefixLineOne']
: '',
prefixOtherLines: namedArgs.containsKey('prefixOtherLines')
? namedArgs['prefixOtherLines']
: null,
minLevel: namedArgs.containsKey('minLevel')
? namedArgs['minLevel']
: DiagnosticLevel.debug);
case 'toDiagnosticsNode':
return ({positionalArgs, namedArgs, typeArgs}) => toDiagnosticsNode(
name: namedArgs.containsKey('name') ? namedArgs['name'] : null,
style: namedArgs.containsKey('style') ? namedArgs['style'] : null);
case 'toString':
return ({positionalArgs, namedArgs, typeArgs}) => toString(
minLevel: namedArgs.containsKey('minLevel')
? namedArgs['minLevel']
: DiagnosticLevel.info);
default:
throw HTError.undefined(varName);
}
}
}
|
import { CompletionItemKind } from 'vscode';
import { fillCompletions } from '../util';
const items = [
{
label: '_GUICtrlMenu_AddMenuItem',
documentation: 'Adds a new menu item to the end of the menu',
},
{
label: '_GUICtrlMenu_AppendMenu',
documentation:
'Appends a new item to the end of the specified menu bar, drop-down menu, submenu, or shortcut menu',
},
{
label: '_GUICtrlMenu_CalculatePopupWindowPosition',
documentation: 'Calculates an appropriate pop-up window position',
},
{
label: '_GUICtrlMenu_CheckMenuItem',
documentation:
"Sets the state of the specified menu item's check mark attribute to either selected or clear",
},
{
label: '_GUICtrlMenu_CheckRadioItem',
documentation: 'Checks a specified menu item and makes it a radio item',
},
{
label: '_GUICtrlMenu_CreateMenu',
documentation: 'Creates a menu',
},
{
label: '_GUICtrlMenu_CreatePopup',
documentation: 'Creates a drop down menu, submenu, or shortcut menu',
},
{
label: '_GUICtrlMenu_DeleteMenu',
documentation: 'Deletes an item from the specified menu',
},
{
label: '_GUICtrlMenu_DestroyMenu',
documentation: 'Destroys the specified menu and frees any memory that the menu occupies',
},
{
label: '_GUICtrlMenu_DrawMenuBar',
documentation: 'Redraws the menu bar of the specified window',
},
{
label: '_GUICtrlMenu_EnableMenuItem',
documentation: 'Enables, disables, or grays the specified menu item',
},
{
label: '_GUICtrlMenu_FindItem',
documentation: "Retrieves a menu item based on it's text",
},
{
label: '_GUICtrlMenu_FindParent',
documentation: 'Retrieves the window to which a menu belongs',
},
{
label: '_GUICtrlMenu_GetItemBmp',
documentation: 'Retrieves the bitmap displayed for the item',
},
{
label: '_GUICtrlMenu_GetItemBmpChecked',
documentation: 'Retrieves the bitmap displayed if the item is selected',
},
{
label: '_GUICtrlMenu_GetItemBmpUnchecked',
documentation: 'Retrieves the bitmap displayed if the item is not selected',
},
{
label: '_GUICtrlMenu_GetItemChecked',
documentation: 'Retrieves the status of the menu item checked state',
},
{
label: '_GUICtrlMenu_GetItemCount',
documentation: 'Retrieves the number of items in the specified menu',
},
{
label: '_GUICtrlMenu_GetItemData',
documentation: 'Retrieves the application defined value associated with the menu item',
},
{
label: '_GUICtrlMenu_GetItemDefault',
documentation: 'Retrieves the status of the menu item default state',
},
{
label: '_GUICtrlMenu_GetItemDisabled',
documentation: 'Retrieves the status of the menu item disabled state',
},
{
label: '_GUICtrlMenu_GetItemEnabled',
documentation: 'Retrieves the status of the menu item enabled state',
},
{
label: '_GUICtrlMenu_GetItemGrayed',
documentation: 'Retrieves the status of the menu item grayed state',
},
{
label: '_GUICtrlMenu_GetItemHighlighted',
documentation: 'Retrieves the status of the menu item highlighted state',
},
{
label: '_GUICtrlMenu_GetItemID',
documentation: 'Retrieves the menu item ID',
},
{
label: '_GUICtrlMenu_GetItemInfo',
documentation: 'Retrieves information about a menu item',
},
{
label: '_GUICtrlMenu_GetItemRect',
documentation: 'Retrieves the bounding rectangle for the specified menu item',
},
{
label: '_GUICtrlMenu_GetItemRectEx',
documentation: 'Retrieves the bounding rectangle for the specified menu item',
},
{
label: '_GUICtrlMenu_GetItemState',
documentation: 'Retrieves the menu item state',
},
{
label: '_GUICtrlMenu_GetItemStateEx',
documentation: 'Retrieves the menu flags associated with the specified menu item',
},
{
label: '_GUICtrlMenu_GetItemSubMenu',
documentation: 'Retrieves a the submenu activated by a specified item',
},
{
label: '_GUICtrlMenu_GetItemText',
documentation: 'Retrieves the text of the specified menu item',
},
{
label: '_GUICtrlMenu_GetItemType',
documentation: 'Retrieves the menu item type',
},
{
label: '_GUICtrlMenu_GetMenu',
documentation: 'Retrieves the handle of the menu assigned to the given window',
},
{
label: '_GUICtrlMenu_GetMenuBackground',
documentation: "Retrieves the brush to use for the menu's background",
},
{
label: '_GUICtrlMenu_GetMenuBarInfo',
documentation: 'Retrieves information about the specified menu bar',
},
{
label: '_GUICtrlMenu_GetMenuContextHelpID',
documentation: 'Retrieves the context help identifier',
},
{
label: '_GUICtrlMenu_GetMenuData',
documentation: 'Retrieves the application defined value',
},
{
label: '_GUICtrlMenu_GetMenuDefaultItem',
documentation: 'Retrieves the default menu item on the specified menu',
},
{
label: '_GUICtrlMenu_GetMenuHeight',
documentation: 'Retrieves the maximum height of a menu',
},
{
label: '_GUICtrlMenu_GetMenuInfo',
documentation: 'Retrieves information about a specified menu',
},
{
label: '_GUICtrlMenu_GetMenuStyle',
documentation: 'Retrieves the style information for a menu',
},
{
label: '_GUICtrlMenu_GetSystemMenu',
documentation: 'Allows the application to access the window menu for copying and modifying',
},
{
label: '_GUICtrlMenu_InsertMenuItem',
documentation: 'Inserts a new menu item at the specified position',
},
{
label: '_GUICtrlMenu_InsertMenuItemEx',
documentation: 'Inserts a new menu item at the specified position in a menu',
},
{
label: '_GUICtrlMenu_IsMenu',
documentation: 'Determines whether a handle is a menu handle',
},
{
label: '_GUICtrlMenu_LoadMenu',
documentation:
'Loads the specified menu resource from the executable file associated with an application instance',
},
{
label: '_GUICtrlMenu_MapAccelerator',
documentation: "Maps a menu accelerator key to it's position in the menu",
},
{
label: '_GUICtrlMenu_MenuItemFromPoint',
documentation: 'Determines which menu item is at the specified location',
},
{
label: '_GUICtrlMenu_RemoveMenu',
documentation: 'Deletes a menu item or detaches a submenu from the specified menu',
},
{
label: '_GUICtrlMenu_SetItemBitmaps',
documentation: 'Associates the specified bitmap with a menu item',
},
{
label: '_GUICtrlMenu_SetItemBmp',
documentation: 'Sets the bitmap displayed for the item',
},
{
label: '_GUICtrlMenu_SetItemBmpChecked',
documentation: 'Sets the bitmap displayed if the item is selected',
},
{
label: '_GUICtrlMenu_SetItemBmpUnchecked',
documentation: 'Sets the bitmap displayed if the item is not selected',
},
{
label: '_GUICtrlMenu_SetItemChecked',
documentation: 'Sets the checked state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemData',
documentation: 'Sets the application defined value for a menu item',
},
{
label: '_GUICtrlMenu_SetItemDefault',
documentation: 'Sets the status of the menu item default state',
},
{
label: '_GUICtrlMenu_SetItemDisabled',
documentation: 'Sets the disabled state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemEnabled',
documentation: 'Sets the enabled state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemGrayed',
documentation: 'Sets the grayed state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemHighlighted',
documentation: 'Sets the highlighted state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemID',
documentation: 'Sets the menu item ID',
},
{
label: '_GUICtrlMenu_SetItemInfo',
documentation: 'Changes information about a menu item',
},
{
label: '_GUICtrlMenu_SetItemState',
documentation: 'Sets the state of a menu item',
},
{
label: '_GUICtrlMenu_SetItemSubMenu',
documentation: 'Sets the drop down menu or submenu associated with the menu item',
},
{
label: '_GUICtrlMenu_SetItemText',
documentation: 'Sets the text for a menu item',
},
{
label: '_GUICtrlMenu_SetItemType',
documentation: 'Sets the menu item type',
},
{
label: '_GUICtrlMenu_SetMenu',
documentation: 'Assigns a new menu to the specified window',
},
{
label: '_GUICtrlMenu_SetMenuBackground',
documentation: 'Sets the background brush for the menu',
},
{
label: '_GUICtrlMenu_SetMenuContextHelpID',
documentation: 'Sets the context help identifier for the menu',
},
{
label: '_GUICtrlMenu_SetMenuData',
documentation: 'Sets the application defined for the menu',
},
{
label: '_GUICtrlMenu_SetMenuDefaultItem',
documentation: 'Sets the default menu item',
},
{
label: '_GUICtrlMenu_SetMenuHeight',
documentation: 'Sets the maximum height of the menu',
},
{
label: '_GUICtrlMenu_SetMenuInfo',
documentation: 'Sets information for a specified menu',
},
{
label: '_GUICtrlMenu_SetMenuStyle',
documentation: 'Sets the menu style',
},
{
label: '_GUICtrlMenu_TrackPopupMenu',
documentation: 'Displays a shortcut menu at the specified location ',
},
];
const functions = fillCompletions(
items,
CompletionItemKind.Function,
'GuiMenu UDF - #include <GuiMenu.au3>',
);
export default functions;
|
package com.brins.baselib.database.typeconverter
import androidx.room.TypeConverter
import com.brins.baselib.module.BaseMusic
import com.brins.baselib.utils.GsonUtils
/**
* Created by lipeilin
* on 2020/10/19
*/
class SongConverter {
@TypeConverter
fun getSongFromString(value: String): BaseMusic.Song {
return GsonUtils.fromJson(value, BaseMusic.Song::class.java)
}
@TypeConverter
fun storeSongToString(value: BaseMusic.Song): String {
return GsonUtils.toJson(value)
}
}
|
package in.conceptarchitect.finance.storage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import in.conceptarchitect.finance.BankAccount;
import in.conceptarchitect.finance.CurrentAccount;
import in.conceptarchitect.finance.OverdraftAccount;
import in.conceptarchitect.finance.SavingsAccount;
public class AccountProcessorTest {
AccountStorage storage;
String correctPassword="pass";
double amount=10000;
@Before
public void setup() {
storage=AccountStorage.getDefaultStorage(); //static method example
storage.addAccount(new SavingsAccount(1,"SA",correctPassword,amount));
storage.addAccount(new CurrentAccount(1,"CA",correctPassword,amount));
storage.addAccount(new OverdraftAccount(1,"ODA",correctPassword,amount));
}
@Test
public void defaultStorageIsHashMapAccountStorage() {
assertTrue(storage instanceof HashmapAccountStorage);
}
class Counter{
int count;
void increment() { count++ ; }
}
@Test
public void canCountTotalNumberOfAccounts() {
//final int count=0;
final var counter=new Counter();
storage.process(new Processor<BankAccount>() {
public void process(BankAccount object) {
counter.increment();
}
});
assertEquals(storage.size(), counter.count);
}
@Test
public void canCountTotalNumberOfAccountsV2() {
var counter=new Counter();
storage.process(account-> counter.increment());
        //storage.process( counter::increment); //increment doesn't take a parameter so it can't satisfy process(T obj)
assertEquals(storage.size(), counter.count);
}
@Test
public void canCountAllSavingsAccount() {
storage.addAccount(new SavingsAccount(1,"SA",correctPassword,amount));
final int count[]= {0};
Processor<BankAccount> accountCounter= (account) ->{
if(account instanceof SavingsAccount)
count[0]++;
};
storage.process(accountCounter);
assertEquals(2, count[0]);
}
@Test
public void canFindBalanceInAllAccount() {
        //an array holding one double is similar to a plain double,
        //but it is a reference, so the anonymous Processor below can mutate it
final double totalBalance[]= {0};
storage.process(new Processor<BankAccount>() {
@Override
public void process(BankAccount account) {
// TODO Auto-generated method stub
totalBalance[0]+=account.getBalance();
}
});
assertEquals(amount*storage.size(), totalBalance[0],0);
}
@Test
public void canSumBalancesOfAllAccountVersion2() {
final double totalBalance[]= {0};
storage.process(account -> totalBalance[0]+=account.getBalance());
assertEquals(amount*storage.size(), totalBalance[0],0);
}
}
|
//get the path we are currently in
var path = require("path");
//create the model
var Sequelize = require("sequelize");
//declare that we will use sqlite
var sequelize= new Sequelize (null, null, null, {dialect:"sqlite", storage: "notaBD.sqlite"});
//import the table definition found in notaModel.js
var nota =sequelize.import (path.join(__dirname,'notaModel'));
exports.nota = nota; //export the definition
//sequelize.sync() creates and initializes the table.
sequelize.sync().success (function(){
    //success runs a handler once the table has been created
nota.count().success(function(count){
        //check whether the table is empty
if(count===0){
nota.create({
nombre: "Juan Perez",
notaFinal: "100"
}).success(function(){
console.log("Notas inicializado")
})
}
})
})
|
package fingerprint
import (
"crypto"
_ "crypto/sha256"
"os"
"testing"
)
func TestEncodedFingerprint(t *testing.T) {
tests := []struct {
name string
fn string
want string
options []Option
}{
{"default", "testdata/raw", "7261772d646174610a",
[]Option{},
},
{"prefix", "testdata/raw", "PREFIX:7261772d646174610a",
[]Option{WithPrefix("PREFIX:")},
},
{"sha256", "testdata/raw", "9d9b7b1f190165f8adaf15596b8d0ffd093f98dd022af12f0d214c3b55a6ed09",
[]Option{WithHash(crypto.SHA256)},
},
{"hex", "testdata/ca.der", "6908751f68290d4573ae0be39a98c8b9b7b7d4e8b2a6694b7509946626adfe98",
[]Option{WithHash(crypto.SHA256), WithEncoding(HexFingerprint)},
},
{"base64", "testdata/ca.der", "aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat/pg=",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64StdFingerprint)},
},
{"base64url", "testdata/ca.der", "aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat_pg=",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64URLFingerprint)},
},
{"base64raw", "testdata/ca.der", "aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat/pg",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64RawStdFingerprint)},
},
{"base64url-raw", "testdata/ca.der", "aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat_pg",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64RawURLFingerprint)},
},
{"emoji", "testdata/ca.der", "🚁🍎👺🚌🏮☁️🎍👀🇮🇹✋🍼🚽⛅🐼🚬🎅🇷🇺🇷🇺🚂🤢🎀💩🚁🎆👺🎨👌✔️🚸🌈⚡🐼",
[]Option{WithHash(crypto.SHA256), WithEncoding(EmojiFingerprint)},
},
{"prefix, hex", "testdata/ca.der", "PREFIX:6908751f68290d4573ae0be39a98c8b9b7b7d4e8b2a6694b7509946626adfe98",
[]Option{WithHash(crypto.SHA256), WithEncoding(HexFingerprint), WithPrefix("PREFIX:")},
},
{"prefix, base64", "testdata/ca.der", "PREFIX:aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat/pg=",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64StdFingerprint), WithPrefix("PREFIX:")},
},
{"prefix, base64url", "testdata/ca.der", "PREFIX:aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat_pg=",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64URLFingerprint), WithPrefix("PREFIX:")},
},
{"prefix, base64url-raw", "testdata/ca.der", "PREFIX:aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat_pg",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64RawURLFingerprint), WithPrefix("PREFIX:")},
},
{"prefix, base64raw", "testdata/ca.der", "PREFIX:aQh1H2gpDUVzrgvjmpjIube31OiypmlLdQmUZiat/pg",
[]Option{WithHash(crypto.SHA256), WithEncoding(Base64RawStdFingerprint), WithPrefix("PREFIX:")},
},
{"prefix, emoji", "testdata/ca.der", "PREFIX:🚁🍎👺🚌🏮☁️🎍👀🇮🇹✋🍼🚽⛅🐼🚬🎅🇷🇺🇷🇺🚂🤢🎀💩🚁🎆👺🎨👌✔️🚸🌈⚡🐼",
[]Option{WithHash(crypto.SHA256), WithEncoding(EmojiFingerprint), WithPrefix("PREFIX:")},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
input, err := os.ReadFile(tt.fn)
if err != nil {
t.Fatalf("failed to read %s: %v", tt.fn, err)
}
if got := Fingerprint(input, tt.options...); got != tt.want {
t.Errorf("EncodedFingerprint() = %v, want %v", got, tt.want)
}
})
}
}
|
-- todo_lists_view
CREATE OR REPLACE ALGORITHM = UNDEFINED
VIEW `todo_lists_view`
AS
SELECT tl.*,u.name AS user_name FROM todo_lists tl
LEFT JOIN users u ON u.id=tl.user_id;
|
; void *tshr_saddrcdown(void *saddr)
SECTION code_clib
SECTION code_arch
PUBLIC _tshr_saddrcdown_fastcall
EXTERN _zx_saddrcdown_fastcall
defc _tshr_saddrcdown_fastcall = _zx_saddrcdown_fastcall
|
class AboutController < ApplicationController
skip_before_action :restrict_non_visible_user, only: [:terms, :privacy]
def terms
@slug = "about"
@title = t("titles.terms", brand: t(:brand))
end
def privacy
@slug = "privacy"
@title = t("titles.privacy", brand: t(:brand))
end
def us
@slug = "about"
@title = t("titles.about")
end
def goodbye
@slug = "about"
@title = t("titles.goodbye")
end
def tips
@slug = "tips"
@title = t("titles.tips")
end
def well_known_apple_app_site_association
render plain: ""
end
end
|
<?php
namespace Cisse\Bundle\TraitsBundle\Model\Nullable\Boolean;
trait IsAvailableTrait
{
protected ?bool $isAvailable = false;
public function getIsAvailable(): ?bool
{
return $this->isAvailable;
}
public function setIsAvailable(?bool $isAvailable): self
{
$this->isAvailable = $isAvailable;
return $this;
}
}
|
#include <scp/Input.hpp>
#include <GLFW/glfw3.h>
#include <scp/ui/Button.hpp>
using scp::ui::Button;
Button::Button(double right, double left, double top, double bottom):
m_right(right),
m_left(left),
m_top(top),
m_bottom(bottom),
m_input(Input::getInstance())
{
}
bool Button::isClicked()
{
return isHovering() && m_input.isMouseButtonDown(GLFW_MOUSE_BUTTON_LEFT);
}
bool Button::isHovering()
{
double mouseX = m_input.getMouseX();
double mouseY = m_input.getMouseY();
bool xIntersect = (mouseX >= m_left) && (mouseX <= m_right);
bool yIntersect = (mouseY >= m_top) && (mouseY <= m_bottom);
return xIntersect && yIntersect;
}
void Button::move(double x, double y)
{
m_right += x;
m_left += x;
m_top += y;
m_bottom += y;
}
|
-- examples on alter table
use hron;
-- check the current table status
describe item;
-- add a column
alter table item add column counter decimal(65, 30);
-- drop a column
alter table item drop column counter;
-- add check
alter table item add constraint check(status in ('A', 'B', 'X'));
-- Error Code: 3819. Check constraint 'items_chk_1' is violated.
insert into item values(13, 'Y', 'ciao', 13);
insert into item (item_id, status) values(43, 'X');
-- Error Code: 3819. Check constraint 'items_chk_1' is violated.
update item set status = '?';
-- I mean it, make item empty!
delete from item;
-- check the coder table
describe coder;
select * from coder;
-- add unique constraint
alter table coder add constraint unique (first_name, last_name);
insert into coder values(1222, 'Bruce', 'Austin', '2021-12-01', 6000.00);
-- Error Code: 1062. Duplicate entry 'Bruce-Austin' for key 'coder.first_name'
insert into coder values(1223, 'Bruce', 'Austin', '2022-01-01', 6000.00);
insert into coder values(1223, 'Bruce Jr.', 'Austin', curdate(), 4000.00);
-- add pk, two ways
alter table coder add constraint primary key(coder_id);
-- alter table coder modify coder_id int primary key;
-- drop pk
alter table coder drop primary key;
-- add pk w/ autoincrement, two ways
alter table coder modify coder_id int primary key auto_increment;
-- alter table coder change coder_id coder_id int primary key auto_increment;
-- drop autoincrement from pk
alter table coder change coder_id coder_id int;
-- add fk
alter table item add constraint foreign key(coder_id) references coder(coder_id);
|
using Core.WebContent.NHibernate.Models;
using Core.WebContent.NHibernate.Static;
using FluentNHibernate.Mapping;
using Framework.Facilities.NHibernate.Filters;
namespace Core.WebContent.NHibernate.Mappings
{
public class CategoryMapping : ClassMap<WebContentCategory>
{
public CategoryMapping()
{
Cache.Region("WebContent_Categories").ReadWrite();
Table("WebContent_Categories");
Id(category => category.Id);
Map(category => category.UserId);
References(category => category.Section);
Map(category => category.Status).CustomType(typeof(CategoryStatus));
HasMany(category => category.CurrentLocales).KeyColumn("CategoryId")
.Table("CategoryLocales").AsSet().ApplyFilter<CultureFilter>()
.Access.ReadOnlyPropertyThroughCamelCaseField(Prefix.None)
.Inverse()
.LazyLoad()
.Cascade.All();
}
}
}
|
package MarkovBot::Commands;
use base qw(Exporter);
use 5.010;
use strict;
use warnings;
our @EXPORT = qw(getCommandSubs);
use FindBin qw($Bin);
use lib $Bin;
use MarkovBot::Ignore;
use MarkovBot::Config;
use MarkovBot::Redis;
use Scalar::Util qw(looks_like_number);
sub commandPing() {
return "Pong!";
}
sub commandIgnore() {
my $command = shift;
if (scalar( @{$command} ) != 2) {
return "Usage: .ignore <user>";
}
ignore($command->[1]);
return "Now ignoring ".$command->[1].".";
}
sub commandUnignore() {
my $command = shift;
if (scalar( @{$command} ) != 2) {
return "Usage: .unignore <user>";
}
unignore($command->[1]);
return "No longer ignoring ".$command->[1].".";
}
sub commandShitposting() {
my $command = shift;
my $chan = shift;
if (scalar( @{$command} ) != 2 || !looks_like_number $command->[1]
|| $command->[1] > 100 || $command->[1] < 0) {
return "Usage: .shitposting <level>";
}
my $redis = redis();
my $p = config("redis_prefix");
$redis->set("$p:".config("irc_server").":$chan:chattiness", $command->[1]);
}
sub getCommandSubs() {
return {
"ping" => \&commandPing,
"ignore" => \&commandIgnore,
"unignore" => \&commandUnignore,
"shitposting" => \&commandShitposting,
};
}
1;
|
use ip_sniffer::{scan, Arguments};
use std::sync::mpsc::channel;
use std::{env, process, thread};
fn main() {
// 1. Parse arguments
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let arguments = Arguments::new(&args).unwrap_or_else(|err| {
if err.contains("help") {
process::exit(0);
} else {
eprintln!("{} program parsing arguments -> {}", program, err);
process::exit(1);
}
});
    // 2. Create multiple threads to scan ports
let num_threads = arguments.threads;
let addr = arguments.ipaddr;
let (tx, rx) = channel();
for i in 0..num_threads {
let tx = tx.clone();
thread::spawn(move || {
scan(tx, i, addr, num_threads);
});
}
// 3. Receive message and display to terminal
let mut out = vec![];
drop(tx);
for p in rx {
out.push(p);
}
println!();
out.sort_unstable();
for v in out {
println!("{} is open", v);
}
}
|
import subprocess
import os
from src.parser import popen
import pathlib
def checkRunAsRustc(file):
with open(file, "r", encoding="utf-8") as f:
lines = [x.strip('\n') for x in f.readlines()]
if len(lines) > 1:
if lines[0] == "// rustc":
return True
return False
def rust(path: str, filename: str, *args) -> int:
file_basename = os.path.basename(filename)
file_basename = file_basename.split(".")
file_basename = "".join(file_basename[:len(file_basename) - 1])
if file_basename[0] == "/" or file_basename[0] == "\\":
file_basename = file_basename[1:]
targ_path = os.path.join(path, file_basename)
    # compile
    compile_status = popen("rustc", "-o", targ_path, *args, filename)
    if compile_status == 0:
        # run the compiled binary and return its exit status
        return popen(targ_path)
    # compilation failed; propagate the compiler's exit status instead of returning None
    return compile_status
def rsCargo(path: str, filename: str, *args) -> bool:
if checkRunAsRustc(filename):
return False
parent_path = str(pathlib.Path(path).parent.absolute())
files_in_parent = os.listdir(parent_path)
if "Cargo.toml" in files_in_parent:
subprocess.call(f"cd {parent_path} && cargo run", shell=True)
return True
return False
|
/*
Copyright 2021 Measures for Justice Institute.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.mfj.expr
/**
* Get the variable names that are referenced in the supplied [expr].
*/
fun getVarNames( expr:Expr ):Set<String> {
val varNames:MutableSet<String> = mutableSetOf()
getVarNames( varNames, expr )
return varNames
}
/**
* Add the variable names that are referenced in the supplied [expr] to [varNames].
*/
fun getVarNames( varNames:MutableSet<String>, expr:Expr ) {
when(expr) {
is ExprConjunction -> {
expr.params.forEach { param -> getVarNames(varNames,param) }
}
is ExprLogicStatement -> {
getVarNames(varNames,expr.left)
getVarNames(varNames,expr.right)
}
else -> error("Unexpected type ${expr.javaClass}")
}
}
/**
* Add the variable names that are referenced in the supplied [value] to [varNames].
*/
fun getVarNames( varNames:MutableSet<String>, value:ExValue ) {
when(value) {
is ExValueVar -> varNames.add(value.getVariableName())
is ExValueLit -> { /* no vars in a literal */ }
is ExValueList -> value.values.forEach { v -> getVarNames(varNames,v) }
is ExValueCompound -> {
getVarNames( varNames, value.left )
getVarNames( varNames, value.right )
}
else -> error("Unexpected type ${value.javaClass}")
}
}
|
use primal_bit::BitVec;
use std::cmp;
use crate::wheel;
pub mod primes;
mod presieve;
/// A heavily optimised prime sieve.
///
/// This is a streaming segmented sieve, meaning it sieves numbers in
/// intervals, extracting whatever it needs and discarding it. See
/// `Sieve` for a wrapper that caches the information to allow for
/// repeated queries, at the cost of *O(limit)* memory use.
///
/// This uses *O(sqrt(limit))* memory, and is designed to be as
/// cache-friendly as possible. `StreamingSieve` should be used for
/// one-off calls, or simple linear iteration.
///
/// The design is *heavily* inspired by/adapted from Kim Walisch's
/// [primesieve](http://primesieve.org/), and has similar speed
/// (around 5-20% slower).
///
/// # Examples
///
/// ```rust
/// let count = primal::StreamingSieve::prime_pi(123456);
/// println!("𝜋(123456) = {}", count);
/// ```
#[derive(Debug)]
pub struct StreamingSieve {
small: Option<crate::Sieve>,
sieve: BitVec,
primes: Vec<wheel::State<wheel::Wheel210>>,
small_primes: Vec<wheel::State<wheel::Wheel30>>,
large_primes: Vec<wheel::State<wheel::Wheel210>>,
presieve: presieve::Presieve,
low: usize,
current: usize,
limit: usize,
}
const CACHE: usize = 32 << 10;
const SEG_ELEMS: usize = 8 * CACHE;
const SEG_LEN: usize = SEG_ELEMS * wheel::BYTE_MODULO / wheel::BYTE_SIZE;
fn isqrt(x: usize) -> usize {
(x as f64).sqrt() as usize
}
impl StreamingSieve {
/// Create a new instance of the streaming sieve that will
/// correctly progressively filter primes up to `limit`.
pub(crate) fn new(limit: usize) -> StreamingSieve {
let low = 0;
let elems = cmp::min(wheel::bits_for(limit), SEG_ELEMS);
let presieve = presieve::Presieve::new(elems);
let current = presieve.smallest_unincluded_prime();
let small = if limit < current * current {
None
} else {
Some(crate::Sieve::new(isqrt(limit) + 1))
};
StreamingSieve {
small,
sieve: BitVec::from_elem(elems, true),
primes: vec![],
small_primes: vec![],
large_primes: vec![],
presieve,
low,
current,
limit
}
}
fn split_index(&self, idx: usize) -> (usize, usize) {
let len = SEG_ELEMS;
(idx / len,idx % len)
}
fn index_for(&self, n: usize) -> (bool, usize, usize) {
let (b, idx) = wheel::bit_index(n);
let (base, tweak) = self.split_index(idx);
(b, base, tweak)
}
/// Count the number of primes upto and including `n`, that is, 𝜋,
/// the [prime counting
/// function](https://en.wikipedia.org/wiki/Prime-counting_function).
///
/// # Examples
///
/// ```rust
/// assert_eq!(primal::StreamingSieve::prime_pi(10), 4);
/// // the endpoint is included
/// assert_eq!(primal::StreamingSieve::prime_pi(11), 5);
///
/// assert_eq!(primal::StreamingSieve::prime_pi(100), 25);
/// assert_eq!(primal::StreamingSieve::prime_pi(1000), 168);
/// ```
pub fn prime_pi(n: usize) -> usize {
match n {
0..=1 => 0,
2 => 1,
3..=4 => 2,
5..=6 => 3,
7..=10 => 4,
_ => {
let mut sieve = StreamingSieve::new(n);
let (includes, base, tweak) = sieve.index_for(n);
let mut count = match wheel::BYTE_MODULO {
30 => 3,
_ => unimplemented!()
};
for _ in 0..base {
let (_, bitv) = sieve.next().unwrap();
count += bitv.count_ones();
}
let (_, last) = sieve.next().unwrap();
count += last.count_ones_before(tweak + includes as usize);
count
}
}
}
    /// Compute *p<sub>n</sub>*, the `n`th prime number, 1-indexed
/// (i.e. *p<sub>1</sub>* = 2, *p<sub>2</sub>* = 3).
///
/// # Panics
///
/// `n` must be larger than 0 and less than the total number of
/// primes in this sieve (that is,
/// `self.prime_pi(self.upper_bound())`).
///
/// # Example
///
/// ```rust
/// assert_eq!(primal::StreamingSieve::nth_prime(1_000), 7919);
/// ```
pub fn nth_prime(n: usize) -> usize {
assert!(n > 0);
match n {
1 => 2,
2 => 3,
3 => 5,
_ => {
let mut bit_n = n - 3;
let (_, hi) = primal_estimate::nth_prime(n as u64);
let mut sieve = StreamingSieve::new(hi as usize);
while let Some((low, bits)) = sieve.next() {
let count = bits.count_ones();
if count >= bit_n {
let bit_idx = bits.find_nth_bit(bit_n - 1).unwrap();
return low + wheel::from_bit_index(bit_idx)
}
bit_n -= count
}
unreachable!()
}
}
}
fn add_sieving_prime(&mut self, p: usize, low: usize) {
if p <= CACHE / 2 {
self.small_primes.push(wheel::State::new(wheel::Wheel30, p, low));
} else {
let elem = wheel::State::new(wheel::Wheel210, p, low);
if p < CACHE * 5 / 2 {
self.primes.push(elem)
} else {
self.large_primes.push(elem)
}
}
}
fn find_new_sieving_primes(&mut self, low: usize, high: usize) {
if let Some(small) = self.small.take() {
for p in small.primes_from(self.current) {
if p * p >= high {
self.current = p;
break
}
self.add_sieving_prime(p, low);
}
self.small = Some(small);
}
}
fn small_primes_sieve<W: wheel::Wheel>(sieve: &mut BitVec,
small_primes: &mut [wheel::State<W>]) {
let bytes = sieve.as_bytes_mut();
for wi in small_primes {
wi.sieve_hardcoded(bytes);
}
}
fn direct_sieve(&mut self) {
let bytes = self.sieve.as_bytes_mut();
let mut chunks = self.primes.chunks_exact_mut(3);
while let Some([wi1, wi2, wi3]) = chunks.next() {
wi1.sieve_triple(wi2, wi3, bytes);
}
for wi in chunks.into_remainder() {
wi.sieve(bytes);
}
}
fn large_primes_sieve(&mut self) {
let bytes = self.sieve.as_bytes_mut();
let mut chunks = self.large_primes.chunks_exact_mut(2);
while let Some([wi1, wi2]) = chunks.next() {
wi1.sieve_pair(wi2, bytes);
}
for wi in chunks.into_remainder() {
wi.sieve(bytes);
}
}
/// Extract the next chunk of filtered primes, the return value is
/// `Some((low, v))` or `None` if the sieve has reached the limit.
///
/// The vector stores bits for each odd number starting at `low`.
/// Bit `n` of `v` is set if and only if `low + 2 * n + 1` is
/// prime.
///
/// NB. the prime 2 is not included in any of these sieves and so
/// needs special handling.
pub(crate) fn next(&mut self) -> Option<(usize, &BitVec)> {
if self.low >= self.limit {
return None
}
let low = self.low;
self.low = self.low.saturating_add(SEG_LEN);
let high = cmp::min(low.saturating_add(SEG_LEN - 1), self.limit);
self.find_new_sieving_primes(low, high);
self.presieve.apply(&mut self.sieve, low);
StreamingSieve::small_primes_sieve(&mut self.sieve, &mut self.small_primes);
self.direct_sieve();
self.large_primes_sieve();
if low == 0 {
// 1 is not prime.
self.sieve.set(0, false);
self.presieve.mark_small_primes(&mut self.sieve);
}
Some((low, &self.sieve))
}
}
#[cfg(test)]
mod tests {
use crate::Sieve;
use primal_slowsieve::Primes;
use crate::wheel;
use super::StreamingSieve;
fn gcd(x: usize, y: usize) -> usize {
if y == 0 { x }
else { gcd(y, x % y) }
}
fn coprime_to(x: usize) -> Vec<usize> {
(1..x).filter(|&n| gcd(n, x) == 1).collect()
}
#[test]
fn test() {
let coprime = coprime_to(wheel::BYTE_MODULO);
const LIMIT: usize = 2_000_000;
let mut sieve = StreamingSieve::new(LIMIT);
let primes = ::primal_slowsieve::Primes::sieve(LIMIT);
let mut base = 0;
let mut index = 0;
while let Some((_low, next)) = sieve.next() {
for val in next {
let i = wheel::BYTE_MODULO * base + coprime[index];
if i >= LIMIT { break }
assert!(primes.is_prime(i) == val,
"failed for {} (is prime = {})", i, primes.is_prime(i));
index += 1;
if index == wheel::BYTE_SIZE {
index = 0;
base += 1
}
}
}
}
#[test]
fn prime_pi() {
let (limit, mult) = if cfg!(feature = "slow_tests") {
(2_000_000, 19_998)
} else {
(200_000, 1_998)
};
let real = Primes::sieve(limit);
for i in (0..20).chain((0..100).map(|n| n * mult + 1)) {
let val = StreamingSieve::prime_pi(i);
let true_ = real.primes().take_while(|p| *p <= i).count();
assert!(val == true_, "failed for {}, true {}, computed {}",
i, true_, val)
}
}
#[test]
fn nth_prime() {
let primes = Sieve::new(2_000_000);
for (i, p) in primes.primes_from(0).enumerate() {
let n = i + 1;
if n < 2000 || n % 1000 == 0 {
assert_eq!(StreamingSieve::nth_prime(n), p);
}
}
}
// These are designed to specifically test the medium sized and
// large prime sieving.
#[test]
fn prime_pi_huge() {
#[cfg(all(feature = "slow_tests", target_pointer_width = "64"))]
const LIMIT_RESULT: (usize, usize) = (10_000_000_000, 455_052_511);
#[cfg(all(feature = "slow_tests", target_pointer_width = "32"))]
const LIMIT_RESULT: (usize, usize) = (4_294_000_000, 203_236_859);
#[cfg(not(feature = "slow_tests"))]
const LIMIT_RESULT: (usize, usize) = (500_000_000, 26_355_867);
assert_eq!(StreamingSieve::prime_pi(LIMIT_RESULT.0), LIMIT_RESULT.1);
}
#[test]
fn nth_prime_huge() {
#[cfg(all(feature = "slow_tests", target_pointer_width = "64"))]
const LIMIT_RESULT: (usize, usize) = (455_052_512, 10_000_000_019);
#[cfg(all(feature = "slow_tests", target_pointer_width = "32"))]
const LIMIT_RESULT: (usize, usize) = (203_236_860, 4_294_000_079);
#[cfg(not(feature = "slow_tests"))]
const LIMIT_RESULT: (usize, usize) = (26_355_868, 500_000_003);
assert_eq!(StreamingSieve::nth_prime(LIMIT_RESULT.0), LIMIT_RESULT.1);
}
}
|
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 Nathan Juraj Michlo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
import numpy as np
import torch
# ========================================================================= #
# Reduction Strategies #
# ========================================================================= #
def loss_reduction_sum(x: torch.Tensor) -> torch.Tensor:
return x.sum()
def loss_reduction_mean(x: torch.Tensor) -> torch.Tensor:
return x.mean()
def loss_reduction_mean_sum(x: torch.Tensor) -> torch.Tensor:
return x.reshape(x.shape[0], -1).sum(dim=-1).mean()
_LOSS_REDUCTION_STRATEGIES = {
# 'none': lambda tensor: tensor,
'sum': loss_reduction_sum,
'mean': loss_reduction_mean,
'mean_sum': loss_reduction_mean_sum,
}
def loss_reduction(tensor: torch.Tensor, reduction='mean'):
return _LOSS_REDUCTION_STRATEGIES[reduction](tensor)
# ========================================================================= #
# Reduction Strategies #
# ========================================================================= #
def get_mean_loss_scale(x: torch.Tensor, reduction: str):
# check the dimensions if given
assert 2 <= x.ndim <= 4, 'unsupported number of dims, must be one of: BxC, BxHxW, BxCxHxW'
# get the loss scaling
if reduction == 'mean_sum':
return np.prod(x.shape[1:]) # MEAN(B, SUM(C x H x W))
elif reduction == 'mean':
return 1
elif reduction == 'sum':
return np.prod(x.shape) # SUM(B x C x H x W)
else:
raise KeyError('unsupported loss reduction mode')
# ========================================================================= #
# loss batch reduction #
# ========================================================================= #
_REDUCTION_FNS = {
'mean': torch.mean,
'sum': torch.sum,
}
def batch_loss_reduction(tensor: torch.Tensor, reduction_dtype=None, reduction='mean') -> torch.Tensor:
# mean over final dims
if tensor.ndim >= 2:
tensor = torch.flatten(tensor, start_dim=1) # (B, -1)
tensor = _REDUCTION_FNS[reduction](tensor, dim=-1, dtype=reduction_dtype)
# check result
assert tensor.ndim == 1
# done
return tensor
# ========================================================================= #
# END #
# ========================================================================= #
|
# Modified from https://github.com/Homebrew/homebrew-core/blob/master/Formula/haproxy.rb
class HaproxyLibressl < Formula
desc "Reliable, high performance TCP/HTTP load balancer w/ LibreSSL"
homepage "http://www.haproxy.org/"
url "http://www.haproxy.org/download/1.7/src/haproxy-1.7.3.tar.gz"
version "1.7.3"
sha256 "ebb31550a5261091034f1b6ac7f4a8b9d79a8ce2a3ddcd7be5b5eb355c35ba65"
conflicts_with "haproxy", :because => "haproxy-libressl symlink with the name for compatibility with haproxy"
depends_on "pcre"
depends_on "libressl" => :recommended
depends_on "openssl" => :optional
def install
# USE_POLL, USE_TPROXY are implicit
args = %w[
TARGET=generic
ARCH=x86_64
CPU=native
USE_KQUEUE=1
USE_TFO=1
USE_ZLIB=1
]
pcre = Formula["pcre"]
args << "USE_REGPARM=1 USE_PCRE=1 USE_PCRE_JIT=1 USE_STATIC_PCRE=1 PCRE_LIB=#{pcre.lib} PCRE_INC=#{pcre.include}"
if build.with? "libressl"
libressl = Formula["libressl"]
cc_opt = "#{libressl.include}"
ld_opt = "#{libressl.lib}"
else
openssl = Formula["openssl"]
cc_opt = "#{openssl.include}"
ld_opt = "#{openssl.lib}"
end
args << "USE_OPENSSL=1 SSL_INC=#{cc_opt} SSL_LIB=#{ld_opt}"
    # The Makefile.osx doesn't work due to the implicit option USE_LIBCRYPT,
    # so we just build the generic target.
system "make", "CC=#{ENV.cc}", "CFLAGS=#{ENV.cflags}", "LDFLAGS=#{ENV.ldflags}", *args
system "make", "install", "PREFIX=#{prefix}", "DOCDIR=#{prefix}/share/doc/haproxy"
end
def caveats; <<-EOS
**IMPORTANT**: NO DEFAULT CONFIG FILE WILL BE CREATED WITH THE INSTALLATION,
please create your own at #{etc}/haproxy/haproxy.cfg.
If you would like to change the path to the config,
you will have to edit the plist file located at
#{plist_path}
EOS
end
plist_options :manual => "haproxy -f {CFG_FILE}"
def plist; <<-EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<false/>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/haproxy</string>
<string>-f</string>
<string>#{etc}/haproxy/haproxy.cfg</string>
</array>
<key>UserName</key>
<string>nobody</string>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>/dev/null</string>
<key>StandardOutPath</key>
<string>/dev/null</string>
</dict>
</plist>
EOS
end
test do
system "#{sbin}/haproxy", "-v"
end
end
|
package ru.job4j.market;
import java.util.Objects;
/**
 * An order for the market depth (order book).
 * @author Denis Seleznev
 * @version $Id$
 * @since 15.04.2018
*/
public class OrderBook implements Comparable {
private final int id;
private final String book;
private final String type;
private final String action;
private final double price;
int volume;
/**
     * @return the order formatted for display.
*/
@Override
public String toString() {
String result;
if (this.action.equals("bid")) {
result = String.format("%7s%9.2f", this.volume, this.price);
} else {
result = String.format("%7s%9.2f%6s", " ", this.price, this.volume);
}
        return result.replace('.', ',');
}
/**
     * Constructor, initializes the order.
     * @param id identifier.
     * @param book issuer.
     * @param type order type.
     * @param action order action.
     * @param price order price.
     * @param volume number of shares.
*/
public OrderBook(int id, String book, String type, String action, double price, int volume) {
this.id = id;
this.book = book;
this.type = type;
this.action = action;
this.price = price;
this.volume = volume;
}
/**
     * Compares orders by id.
     * @param o the object to compare with.
     * @return the comparison result.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OrderBook orderBook = (OrderBook) o;
return id == orderBook.id;
}
/**
     * @return the order's hashCode based on the id field.
*/
@Override
public int hashCode() {
return Objects.hash(id);
}
/**
     * Allows orders to be sorted in natural order.
     * @param o the object to compare with.
     * @return the comparison result.
*/
@Override
public int compareTo(Object o) {
OrderBook item = (OrderBook) o;
int result;
if (!this.action.equals(item.action)) {
result = this.action.compareTo(item.action);
} else if (this.price != item.price) {
result = Double.compare(item.price, this.price);
} else {
result = Integer.compare(this.id, item.id);
}
return result;
}
public int getId() {
return id;
}
public String getBook() {
return book;
}
public String getType() {
return type;
}
public String getAction() {
return action;
}
public double getPrice() {
return price;
}
public int getVolume() {
return volume;
}
}
|
# AustralianFootyeXchange
A Swift backend for a new footy tipping alternative
combining tipping with Stock market trading
When signing up, each trader receives x shares in each team.
Teams are placed into a trading halt during the match, and at the end dividends are paid out according to the margin.
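A rough sketch of the dividend step (the type names and the per-point rate are assumptions for illustration, not the actual backend model):

```swift
// Hypothetical sketch: pay a per-share dividend proportional to the winning margin.
struct Holding {
    let team: String
    let shares: Int
}

func dividend(for holding: Holding, winner: String, margin: Int, ratePerPoint: Double = 0.05) -> Double {
    guard holding.team == winner else { return 0 }  // in this sketch, only the winner pays out
    return Double(holding.shares) * Double(margin) * ratePerPoint
}

let payout = dividend(for: Holding(team: "Tigers", shares: 10), winner: "Tigers", margin: 25)
// payout == 12.5
```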
TODO:
- [ ] Seeding database
- [ ] Sign up
- [ ] Auth
- [ ] Distribute Stocks
- [ ] Trade stocks
- [ ] Dividend post match
- [ ] Trading halt during games
|
#!/bin/bash
if [ -e download/boost_1_59_0.tar.gz ]; then
echo 'Boost is already there'
exit
fi
echo 'Downloading Boost'
mkdir -p download
cd download
wget -O boost_1_59_0.tar.gz https://sourceforge.net/projects/boost/files/boost/1.59.0/boost_1_59_0.tar.gz/download
|
{-# LANGUAGE ScopedTypeVariables, LiberalTypeSynonyms #-}
{-# LANGUAGE MultiWayIf #-}
module Example
( x
, y
, z
) where
import Data.Char (toLower)
import Stuff
-- 🍯
main :: IO ()
main = putStrLn (hello "Dude")
-- Functions
-- (ie. do things with data)
{-| Hello!
Explanation goes here.
-}
hello :: String -> String
hello name =
-- Test
"Hello " ++ name ++ "!"
foo :: Bar
-- Assumes Bar is a String synonym (from Stuff).
foo = map toLower "bar"
|
# frozen_string_literal: true
module Drip
class Client
module ShopperActivity
# Public: Create a cart activity event.
#
# options - Required. A Hash of additional cart options. Refer to the
# Drip API docs for the required schema.
#
# Returns a Drip::Response.
# See https://developer.drip.com/#cart-activity
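      # Example (illustrative only; the client variable and field values below
      # are assumed, not taken from the Drip docs):
      #
      #   client.create_cart_activity_event(
      #     email: "user@example.com",
      #     provider: "my_store",
      #     action: "created",
      #     cart_id: "abc-123",
      #     cart_url: "https://example.com/cart/abc-123"
      #   )
      #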
def create_cart_activity_event(data = {})
raise ArgumentError, 'email:, person_id:, or :visitor_uuid parameter required' if !data.key?(:email) && !data.key?(:person_id) && !data.key?(:visitor_uuid)
%i[provider action cart_id cart_url].each do |key|
raise ArgumentError, "#{key}: parameter required" unless data.key?(key)
end
data[:occurred_at] = Time.now.iso8601 unless data.key?(:occurred_at)
make_json_request :post, "v3/#{account_id}/shopper_activity/cart", data
end
# Public: Create an order activity event.
#
# options - Required. A Hash of additional order options. Refer to the
# Drip API docs for the required schema.
#
# Returns a Drip::Response.
# See https://developer.drip.com/#order-activity
def create_order_activity_event(data = {})
raise ArgumentError, 'email: or person_id: parameter required' if !data.key?(:email) && !data.key?(:person_id)
%i[provider action order_id].each do |key|
raise ArgumentError, "#{key}: parameter required" unless data.key?(key)
end
data[:occurred_at] = Time.now.iso8601 unless data.key?(:occurred_at)
make_json_request :post, "v3/#{account_id}/shopper_activity/order", data
end
# Public: Create a batch of order activity events.
#
# records - Required. An array of hashes containing orders attributes.
# Refer to the Drip API docs for the required schema.
#
# Returns a Drip::Response.
# See https://developer.drip.com/#create-or-update-a-batch-of-orders
def create_order_activity_events(records = [])
records.each_with_index do |record, i|
raise ArgumentError, "email: or person_id: parameter required in record #{i}" if !record.key?(:email) && !record.key?(:person_id)
%i[provider action order_id].each do |key|
raise ArgumentError, "#{key}: parameter required in record #{i}" unless record.key?(key)
end
record[:occurred_at] = Time.now.iso8601 unless record.key?(:occurred_at)
end
make_json_request :post, "v3/#{account_id}/shopper_activity/order/batch", { orders: records }
end
# Public: Create a product activity event.
#
# options - Required. A Hash of additional product options. Refer to the
# Drip API docs for the required schema.
#
# Returns a Drip::Response.
# See https://developer.drip.com/#product-activity
def create_product_activity_event(data = {})
%i[provider action product_id name price].each do |key|
raise ArgumentError, "#{key}: parameter required" unless data.key?(key)
end
data[:occurred_at] = Time.now.iso8601 unless data.key?(:occurred_at)
make_json_request :post, "v3/#{account_id}/shopper_activity/product", data
end
end
end
end
|
package com.payneteasy.superfly.model.ui.group;
import java.io.Serializable;
import javax.persistence.Column;
import com.payneteasy.superfly.service.mapping.MappingService;
public class UIGroupForCheckbox implements Serializable, MappingService {
private long groupId;
private String subsystemName;
private String groupName;
private String mappingStatus;
private boolean mapped;
@Column(name = "grop_id")
public long getGroupId() {
return groupId;
}
public void setGroupId(long groupId) {
this.groupId = groupId;
}
@Column(name = "subsystem_name")
public String getSubsystemName() {
return subsystemName;
}
public void setSubsystemName(String subsystemName) {
this.subsystemName = subsystemName;
}
@Column(name = "group_name")
public String getGroupName() {
return groupName;
}
public void setGroupName(String groupName) {
this.groupName = groupName;
}
@Column(name = "mapping_status")
public String getMappingStatus() {
return mappingStatus;
}
public void setMappingStatus(String mappingStatus) {
this.mappingStatus = mappingStatus;
mapped = "M".equalsIgnoreCase(mappingStatus);
}
public boolean isMapped() {
return mapped;
}
public void setMapped(boolean mapped) {
this.mapped = mapped;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (groupId ^ (groupId >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
UIGroupForCheckbox other = (UIGroupForCheckbox) obj;
if (groupId != other.groupId)
return false;
return true;
}
public long getItemId() {
return groupId;
}
public String getItemName() {
return groupName;
}
}
|
#!/bin/bash
source /home/oracle/.bashrc
cd /tmp/apex/
# $1: db_pdb_name
# $2: db_sys_pwd
# $3: apex_admin_username
# $4: apex_admin_pwd
# $5: apex_admin_email
$ORACLE_HOME/bin/sqlplus sys/$2@localhost/$1 as sysdba @apex-install.sql
$ORACLE_HOME/bin/sqlplus sys/$2@localhost/$1 as sysdba @apex-install-post.sql $3 $4 $5
|
package authtoken
import (
"github.com/mpeter/go-towerapi/towerapi/errors"
"github.com/mpeter/sling"
)
const basePath = "authtoken/"
// Service is an interface for interfacing with the
// endpoints of the Ansible Tower API
type Service struct {
sling *sling.Sling
}
// NewService handles communication with auth token related methods of the
// Ansible Tower API.
func NewService(sling *sling.Sling) *Service {
return &Service{
sling: sling.New().Path(basePath),
}
}
// Create passes credentials and returns a token
func (s *Service) Create(r *CreateRequest) (*AuthToken, error) {
token := new(AuthToken)
apierr := new(errors.APIError)
_, err := s.sling.New().Post("").BodyJSON(r).Receive(token, apierr)
return token, errors.BuildError(err, apierr)
}
|
package Spark
import org.apache.spark.sql.SparkSession
import com.mongodb.spark._
import com.mongodb.spark.sql._
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import java.util.ArrayList
import scala.collection.JavaConversions._
import org.bson.Document
import org.apache.log4j.Logger
import org.apache.log4j.Level
class SparkSessionCreator {
val conf: SparkConfig = new SparkConfig("config.yml")
def getSparkSession(funName: String, input: String, output: String): SparkSession = {
var dbCollections = Array("matchResults", "professionalGames", "publicGames", "rankedGames")
if (!(dbCollections contains input)) {
throw new IllegalArgumentException(
input + " is not a valid collection name"
)
}
if (!(dbCollections contains output)) {
throw new IllegalArgumentException(
output + " is not a valid collection name"
)
}
val spark = SparkSession
.builder()
.master("local")
.appName(conf.getAppName(funName))
.config("spark.mongodb.input.uri", conf.getCollection(input))
.config("spark.mongodb.output.uri", conf.getCollection(output))
.getOrCreate()
return spark
}
def getHeroSparkSession(): SparkSession = {
val spark = SparkSession
.builder()
.master("local")
.appName("hero-retriever")
.config("spark.mongodb.input.uri", conf.getCollection("heros"))
.config("spark.mongodb.output.uri", conf.getCollection("heros"))
.getOrCreate()
return spark
}
def getItemSparkSession(): SparkSession = {
val spark = SparkSession
.builder()
.master("local")
.appName("item-retriever")
.config("spark.mongodb.input.uri", conf.getCollection("items"))
.config("spark.mongodb.output.uri", conf.getCollection("items"))
.getOrCreate()
return spark
}
}
|
---
layout: post
title: AWS RDS Security Group Amazing Auto Generation
author: Eunchan Lee
---
I just created an RDS instance,
and an SG too (as I posted earlier).
What's the first thing you'd do after the DB instance and SG have been created?
Edit the SG inbound rules
to allow only the VPC IP range and my PC's IP.

When I got there, I noticed that AWS had already done it for me.
Needless to say, AWS knows what to do.
I wanna buy their stock, but it's too expensive. 😅
|
import PsiElement = require('nashorn/com/intellij/psi/PsiElement');
declare class PsiCompiledElement {
mirror : PsiElement;
getMirror() : PsiElement;
}
declare interface PsiCompiledElement extends PsiElement {}
export = PsiCompiledElement
|
@section('footer')
<footer class="footer">
<ul class="footer-list">
<li class="footer-item"><a href="{{route('about')}}" class="footer-link">RE:FOOD'sとは?</a></li>
<li class="footer-item"><a href="{{route('privacy')}}" class="footer-link">プライバシーポリシー</a></li>
<li class="footer-item"><a href="{{route('rule')}}" class="footer-link">利用規約</a></li>
<li class="footer-item"><a href="{{route('legal')}}" class="footer-link">特定商法取引法</a></li>
<li class="footer-item"><a href="{{route('contact')}}" class="footer-link">お問い合わせ</a></li>
</ul>
</footer>
@show
|
use crate::ast::semantic::SymbolId;
use crate::ir::CfgNodeId;
#[derive(Debug, Clone)]
pub enum CallStackItem {
Int(isize),
Bool(bool),
Addr(CfgNodeId, usize),
// StrRef
}
impl CallStackItem {
pub fn is_int(&self) -> bool {
match self {
CallStackItem::Int(_) => true,
_ => false,
}
}
pub fn is_bool(&self) -> bool {
match self {
CallStackItem::Bool(_) => true,
_ => false,
}
}
pub fn is_addr(&self) -> bool {
match self {
CallStackItem::Addr(..) => true,
_ => false,
}
}
pub fn to_int(&self) -> isize {
match self {
CallStackItem::Int(v) => *v,
_ => panic!("expected an integer"),
}
}
pub fn to_bool(&self) -> bool {
match self {
CallStackItem::Bool(v) => *v,
_ => panic!("expected a bool"),
}
}
pub fn to_addr(&self) -> (CfgNodeId, usize) {
match self {
CallStackItem::Addr(node_id, ip) => (*node_id, *ip),
_ => panic!("expected an address"),
}
}
}
#[derive(Debug)]
pub struct CallStackFrame {
items: Vec<CallStackItem>,
pub ctx_proc: SymbolId,
}
impl CallStackFrame {
pub fn new(ctx_proc: SymbolId) -> Self {
Self {
items: Vec::new(),
ctx_proc,
}
}
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
pub fn push(&mut self, item: CallStackItem) {
self.items.push(item);
}
pub fn load(&mut self, index: usize) -> &CallStackItem {
self.items.get(index).unwrap()
}
pub fn store(&mut self, index: usize, item: CallStackItem) {
        self.items[index] = item;
}
pub fn peek(&self) -> &CallStackItem {
self.items.last().unwrap()
}
pub fn pop(&mut self) -> CallStackItem {
self.items.pop().unwrap()
}
}
#[derive(Debug)]
pub struct CallStack {
pub frames: Vec<CallStackFrame>,
}
impl CallStack {
pub fn new() -> Self {
Self { frames: Vec::new() }
}
pub fn is_empty(&self) -> bool {
self.frames.is_empty()
}
pub fn load_item(&mut self, index: usize) -> &CallStackItem {
let frame = self.current_frame_mut();
frame.load(index)
}
pub fn store_item(&mut self, index: usize, item: CallStackItem) {
let frame = self.current_frame_mut();
frame.store(index, item);
}
pub fn push_item(&mut self, item: CallStackItem) {
let frame = self.current_frame_mut();
frame.push(item);
}
pub fn pop_item(&mut self) -> CallStackItem {
let frame = self.current_frame_mut();
frame.pop()
}
pub fn peek_item(&self) -> &CallStackItem {
let frame = self.current_frame();
frame.peek()
}
pub fn open_stackframe(&mut self, ctx_proc: SymbolId) -> &mut CallStackFrame {
let frame = CallStackFrame::new(ctx_proc);
self.frames.push(frame);
self.current_frame_mut()
}
pub fn close_stackframe(&mut self) {
self.frames.pop();
}
pub fn current_frame(&self) -> &CallStackFrame {
self.frames.last().unwrap()
}
pub fn current_frame_mut(&mut self) -> &mut CallStackFrame {
self.frames.last_mut().unwrap()
}
pub fn depth(&mut self) -> usize {
self.frames.len()
}
}
#[cfg(test)]
mod tests {
    // Placeholder sanity check; ignored until a real test is written.
    #[test]
    #[ignore]
    fn vm_callstack_sanity() {
        panic!()
    }
}
|
---
order: 0
title:
zh-CN: 基本
en-US: Basic
---
## zh-CN
最简单的用法。
## en-US
Basic usage.
```tsx
import { Carousel } from 'antd';
import React from 'react';
const contentStyle: React.CSSProperties = {
height: '160px',
color: '#fff',
lineHeight: '160px',
textAlign: 'center',
background: '#364d79',
};
const App: React.FC = () => {
const onChange = (currentSlide: number) => {
console.log(currentSlide);
};
return (
<Carousel afterChange={onChange}>
<div>
<h3 style={contentStyle}>1</h3>
</div>
<div>
<h3 style={contentStyle}>2</h3>
</div>
<div>
<h3 style={contentStyle}>3</h3>
</div>
<div>
<h3 style={contentStyle}>4</h3>
</div>
</Carousel>
);
};
export default App;
```
|
using BabylonCore.Application.Interfaces;
namespace BabylonCore.Persistence
{
public class DatabaseService : IDatabaseService
{
public DatabaseService(IPatientRepository patientRepository)
{
PatientsRepository = patientRepository;
}
public IPatientRepository PatientsRepository { get; }
}
}
|
using System;
namespace LibSvnSharp.Implementation
{
interface IItemMarshaller<T>
{
int ItemSize { get; }
void Write(T value, IntPtr ptr, AprPool pool);
T Read(IntPtr ptr, AprPool pool);
}
}
|
using System;
using System.Collections.Generic;
using System.Reflection;
namespace Orleans.Runtime
{
/// <summary>
/// Metadata for a grain class
/// </summary>
[Serializable]
internal sealed class GrainClassData
{
[NonSerialized]
private readonly Dictionary<string, string> genericClassNames;
private readonly bool isGeneric;
internal int GrainTypeCode { get; private set; }
internal string GrainClass { get; private set; }
internal bool IsGeneric { get { return isGeneric; } }
internal GrainClassData(int grainTypeCode, string grainClass, bool isGeneric)
{
GrainTypeCode = grainTypeCode;
GrainClass = grainClass;
this.isGeneric = isGeneric;
genericClassNames = new Dictionary<string, string>(); // TODO: initialize only for generic classes
}
internal string GetClassName(string typeArguments)
{
// Knowing whether the grain implementation is generic allows for non-generic grain classes
// to implement one or more generic grain interfaces.
// For generic grain classes, the assumption that they take the same generic arguments
// as the implemented generic interface(s) still holds.
if (!isGeneric || String.IsNullOrWhiteSpace(typeArguments))
{
return GrainClass;
}
else
{
lock (this)
{
if (genericClassNames.ContainsKey(typeArguments))
return genericClassNames[typeArguments];
var className = String.Format("{0}[{1}]", GrainClass, typeArguments);
genericClassNames.Add(typeArguments, className);
return className;
}
}
}
internal long GetTypeCode(Type interfaceType)
{
if (interfaceType.IsGenericType && this.IsGeneric)
{
string args = TypeUtils.GetGenericTypeArgs(interfaceType.GetGenericArguments(), t => true);
int hash = Utils.CalculateIdHash(args);
return (((long)(hash & 0x00FFFFFF)) << 32) + GrainTypeCode;
}
else
{
return GrainTypeCode;
}
}
public override string ToString()
{
return String.Format("{0}:{1}", GrainClass, GrainTypeCode);
}
public override int GetHashCode()
{
return GrainTypeCode;
}
public override bool Equals(object obj)
{
if(!(obj is GrainClassData))
return false;
return GrainTypeCode == ((GrainClassData) obj).GrainTypeCode;
}
}
}
|
<?php
class CourseController extends BaseController
{
public function __construct()
{
$this->layout = 'layouts.default';
$this->beforeFilter('csrf', ['on' => ['post', 'put', 'delete']]);
$this->beforeFilter('auth', ['on' => ['post', 'put', 'delete']]);
}
public function Courses()
{
// $courses = Course::all();
$courses = Course::where('online', '=', 1)->get();
$this->layout->title = 'Courses';
$this->layout->content = View::make('course.courses')->with('courses', $courses);
}
public function Course($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.course')->with('course', $course);
}
public function Agreement($code)
{
if(!Auth::check())
return Redirect::to('login');
if(!Auth::user()->hasRoles(['student', 'instructor', 'admin']))
return Redirect::to('login')->with('message', 'you are not a student or an instructor');
if(Auth::user()->hasEnrolled($code))
return Redirect::to('/inclass/'.$code);
$course = Course::where('derskod', $code)->first();
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.agreement')->with('course', $course);
}
public function AgreementPost($code)
{
if(!Auth::check())
return Redirect::to('login');
if(!Auth::user()->hasRoles(['student', 'instructor', 'admin']))
return Redirect::to('login')->with('message', 'you are not a student or an instructor');
if(Auth::user()->hasEnrolled($code))
return Redirect::to('/inclass/'.$code);
$course = Course::where('derskod', $code)->first();
Auth::user()->enroll()->attach($course->derskod);
return Redirect::to('/inclass/'.$code);
}
public function Agreementreminder($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor', 'admin']))
return Redirect::to('login');
$course = Course::where('derskod', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.agreementreminder')->with('course', $course);
}
public function Inclass($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor', 'admin']))
return Redirect::to('login');
if(!Auth::user()->hasEnrolled($code))
return Redirect::to('/agreement/courses/'.$code);
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.class')->with('course', $course);
}
public function Awritten($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.awritten')->with('course', $course);
}
public function Aprogramming($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.aprogramming')->with('course', $course);
}
public function Aquizes($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.aquizes')->with('course', $course);
}
public function Aexams($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.aexams')->with('course', $course);
}
public function Video($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.video')->with('course', $course);
}
public function Reading($code)
{
if(!Auth::user()->hasRoles(['student', 'instructor']))
return Redirect::to('login');
$course = Course::where('code', $code)->first();
$this->layout=View::make('layouts.sidebar');
$this->layout->title = $course->name;
$this->layout->content = View::make('course.reading')->with('course', $course);
}
public function Readinginfo($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.coursereadings')->with('course', $course);
}
public function Objectives($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.objectives')->with('course', $course);
}
public function Weeklyplan($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.weeklyplan')->with('course', $course);
}
public function Evaluations($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.evaluations')->with('course', $course);
}
public function Links($code)
{
$course = Course::where('derskod', $code)->first();
$this->layout = View::make('layouts.course')->with('course', $course);
$this->layout->title = $course->derskod;
$this->layout->content = View::make('course.links')->with('course', $course);
}
}
|
namespace DesignPatterns.SoftwareDesignPattern.Structural.Bridge {
public abstract class Abstractor {
private readonly IImplementor implementor;
protected Abstractor(IImplementor implementor) {
this.implementor = implementor;
}
public string DoThings() => $"Abstractor {implementor.DoStuff()}";
}
}
|
# covid19comparator
A website to compare cases and/or deaths of covid19 between countries. This
website is non-commercial and purely for educational and academic research
purposes. The data comes from the following source:
- https://github.com/CSSEGISandData/COVID-19
Copyright 2020 Johns Hopkins University
Thank you so much for the effort of compiling this data and making it
available. If you are a copyright holder and believe that I am infringing any
rights, please let me know at (dalboris gmail com).
# How to update data?
You need Python 3.6+ and BeautifulSoup 4:
```
pip3 install lxml
pip3 install beautifulsoup4
```
Then:
```
./update_data.py
```
This populates the `data` folder with updated `csv` and `json` files, which
will be automatically embedded within the final `covid19.js`.
# How to generate the website?
This combines `src/`, which uses simpler test data, with the actual data
from `data/`, and writes the result as a static website in a new folder `out/`
(it is overwritten if it already exists).
```
./generate.py
```
# How to deploy the website?
This project assumes you deploy to an Apache server.
If you fork this project, you should first modify `src/.htaccess` to fit your
environment.
Then, define an SSH host called `covid19generator` in your `.ssh/config` file.
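For example, a minimal entry (the hostname, user, and key path are placeholders
for your own server details):
```
Host covid19generator
    HostName your-server.example.com
    User your-deploy-user
    IdentityFile ~/.ssh/id_rsa
```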
Finally, run:
```
./deploy_production.sh
```
This writes the static website to a subfolder named `covid19generator.com` of the
host. You may want to modify `deploy_production.sh` to fit your environment.
|
package com.usher.exception;
/**
* @Author: Usher
* @Description:
*/
public class SellerAuthorizeException extends RuntimeException{
}
|
# frozen_string_literal: true
require 'spec_helper'
support :test_adaptor_helpers
RSpec.describe LedgerSync::Adaptors::Test::Error::AdaptorError::Operations::ThrottleError do
include TestAdaptorHelpers
let(:error) { LedgerSync::Error::AdaptorError::ThrottleError.new(adaptor: test_adaptor) }
let(:op) do
described_class.new(
adaptor: test_adaptor,
resource: error
)
end
let(:result) { op.perform }
it { expect(result).to be_failure }
it { expect(result.error).to be_a(LedgerSync::Error::AdaptorError::ThrottleError) }
end
|
# To the extent possible under law, the author(s) have dedicated all
# copyright and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty. See
# <http://creativecommons.org/publicdomain/zero/1.0/> for a copy of the
# CC0 Public Domain Dedication.
from pyrcb2 import IDefaultDict
# Maps nicknames to tuples containing a voice name and a pitch.
voices = IDefaultDict(lambda: ("en-us", "70"), {
"nickname1": ("en+f3", "65"),
"nickname2": ("en-us+m5", "40"),
})
# Sent to users when they first connect to espeaker.
# XML comments are only spoken when running espeak without SSML (option -m).
connect_message = """\
<!-- If you can hear this, you are not running espeak correctly. -->
<!-- You must run espeak with option dash lowercase m. -->
"""
|
@model AdminCreateViewModel
<html>
<head>
<meta name="viewport" content="width=device-width" />
<title>Create A New Forum</title>
</head>
<body>
<form method="post">
<div>
<label asp-for="newForum.FName"></label>
<input asp-for="newForum.FName" />
<span asp-validation-for="newForum.FName"></span>
</div>
<div>
<p>Set Description of New Forum</p>
<label asp-for="initPost.PostText"></label>
<input asp-for="initPost.PostText" />
<span asp-validation-for="initPost.PostText"></span>
</div>
<label asp-for="newForum.HasLock"></label>
<input asp-for="newForum.HasLock" typeof="checkbox" value="true" checked>
@*<option>Please select one</option>*@
@*</input>*@
<div>
<input type="submit" name="Save" value="Save" />
</div>
</form>
</body>
</html>
|
#!/bin/bash
set -euo pipefail
_DEPTH=1
_FILE=${BASH_SOURCE[0]}
lk_die() { s=$? && echo "$_FILE: $1" >&2 && (exit $s) && false || exit; }
{ type -P realpath || { type -P python && realpath() { python -c \
"import os,sys;print(os.path.realpath(sys.argv[1]))" "$1"; }; }; } \
>/dev/null || lk_die "command not found: realpath"
_FILE=$(realpath "$_FILE") && _DIR=${_FILE%/*} &&
LK_BASE=$(realpath "$_DIR$(eval printf '/..%.s' $(seq 1 "$_DEPTH"))") &&
[ -d "$LK_BASE/lib/bash" ] ||
lk_die "unable to locate LK_BASE"
export LK_BASE
. "$LK_BASE/lib/bash/common.sh"
lk_include backup mail mysql
! lk_is_linux ||
lk_include linux
function exit_trap() {
local STATUS=$? MESSAGE TAR SUBJECT
[ "$BASH_SUBSHELL" -eq 0 ] || return
exec 8>&- &&
rm -Rf "${FIFO_FILE%/*}" || true
lk_log_close -r
[ -z "$LK_BACKUP_MAIL" ] ||
{ [ "$STATUS" -eq 0 ] &&
[ "$RSYNC_STATUS" -eq 0 ] &&
[ "$LK_BACKUP_MAIL_ERROR_ONLY" = Y ]; } || {
lk_mail_new
MESSAGE=
{ [ ! -s "$RSYNC_OUT_FILE" ] && [ ! -s "$RSYNC_ERR_FILE" ]; } ||
! TAR=$(lk_mktemp_file) ||
! lk_delete_on_exit "$TAR" ||
! tar -C "${RSYNC_OUT_FILE%/*}" -czf "$TAR" \
"${RSYNC_OUT_FILE##*/}" \
"${RSYNC_ERR_FILE##*/}" || {
lk_mail_attach \
"$TAR" \
"$HN-$SOURCE_NAME-$LK_SNAPSHOT_TIMESTAMP-rsync.log.tgz" \
application/gzip &&
MESSAGE="the attached log files and " || true
}
[ "$STATUS" -eq 0 ] && {
[ "$RSYNC_STATUS" -eq 0 ] && {
SUBJECT="Success"
MESSAGE="\
The following backup ${RSYNC_RESULT:-completed without error}."
} || {
SUBJECT="Please review"
MESSAGE="\
The following backup ${RSYNC_RESULT:-completed with errors}. Please review \
${MESSAGE}the output below${MESSAGE:+,} and take action if required."
}
} || {
SUBJECT="ACTION REQUIRED"
MESSAGE="\
The following backup ${RSYNC_RESULT:-failed to complete}. Please review \
${MESSAGE}the output below${MESSAGE:+,} and action accordingly."
}
SUBJECT="$SUBJECT: backup of $SOURCE_NAME to $HN:$BACKUP_ROOT"
MESSAGE="
Hello
$MESSAGE
Source: $SOURCE
Destination: $BACKUP_ROOT on $FQDN
Transport: $SOURCE_TYPE
Snapshot: $LK_SNAPSHOT_TIMESTAMP
Status: $(get_stage)
Running as: $USER
Command line:
$(printf '%q' "$0" && { [ ${#_LK_ARGV[@]} -eq 0 ] || printf ' \\\n %q' "${_LK_ARGV[@]}"; })
Output:
$(lk_strip_non_printing -d '\v' <"$SNAPSHOT_LOG_FILE")" &&
lk_mail_set_text "$MESSAGE" &&
lk_mail_send "$SUBJECT" "$LK_BACKUP_MAIL" "$LK_BACKUP_MAIL_FROM" || true
}
}
function find_custom() {
local ARR="${1//-/_}[@]" FILE COUNT=0
for FILE in {"$LK_BASE/etc/backup","$BACKUP_ROOT/conf.d"}/{"$1","$SOURCE_NAME-$1","$SOURCE_NAME/$1"} \
${!ARR+"${!ARR}"}; do
[ -e "$FILE" ] || continue
realpath "$FILE" || lk_die
((++COUNT))
done
((COUNT))
}
function run_custom_hook() {
local HOOK=$1 SCRIPTS SOURCE_SCRIPT i=0 LINES LINE SH \
LK_SOURCE_SCRIPT_ALREADY_STARTED=0 \
LK_SOURCE_SCRIPT_ALREADY_FINISHED=0
! is_stage_complete "hook-$HOOK-started" ||
LK_SOURCE_SCRIPT_ALREADY_STARTED=1
! is_stage_complete "hook-$HOOK-finished" ||
LK_SOURCE_SCRIPT_ALREADY_FINISHED=1
if SCRIPTS=($(find_custom "hook-$HOOK")); then
mark_stage_complete "hook-$HOOK-started"
for SOURCE_SCRIPT in "${SCRIPTS[@]}"; do
lk_console_item "Running hook script:" "$SOURCE_SCRIPT"
(
STATUS=0
(. "$SOURCE_SCRIPT") || STATUS=$?
echo "# ." >&8
exit "$STATUS"
) &
LINES=()
while IFS= read -ru 8 LINE && [ "$LINE" != "# ." ]; do
LINES[$((i++))]=$LINE
done
wait "$!" ||
lk_die "hook script failed (exit status $?)"
[ ${#LINES[@]} -eq 0 ] || {
SH=$(lk_echo_array LINES)
eval "$SH" ||
_LK_TTY_COLOUR2='' _LK_TTY_NO_FOLD=1 \
lk_console_error -r "\
Shell commands emitted by hook script failed (exit status $?):" $'\n'"$SH" ||
lk_die ""
}
lk_console_log "Hook script finished"
done
mark_stage_complete "hook-$HOOK-finished"
fi
}
function assert_stage_valid() {
[ -n "$1" ] &&
lk_in_array "$1" SNAPSHOT_STAGES ||
lk_die "invalid stage: $1"
}
function mark_stage_complete() {
is_stage_complete "$1" ||
touch "$LK_SNAPSHOT/.$1"
}
function is_stage_complete() {
assert_stage_valid "$1"
[ -e "$LK_SNAPSHOT/.$1" ]
}
function get_stage() {
local STAGE
for STAGE in $(tac < <(lk_echo_array SNAPSHOT_STAGES)) starting; do
[ ! -e "$LK_SNAPSHOT/.$STAGE" ] || break
done
echo "${STAGE//-/ }"
}
# run_rsync [SOURCE DEST]
function run_rsync() {
local SRC=${1-} DEST=${2-}
[ $# -eq 2 ] || {
SRC=${SOURCE%/}/
DEST=$LK_SNAPSHOT_FS/
}
lk_run rsync "${RSYNC_ARGS[@]}" "$SRC" "$DEST" \
> >(lk_log_bypass_stdout tee -a "$RSYNC_OUT_FILE") \
2> >(lk_log_bypass_stdout tee -a "$RSYNC_ERR_FILE")
}
SNAPSHOT_STAGES=(
previous-copy-started
previous-copy-finished
hook-pre_rsync-started
hook-pre_rsync-finished
rsync-started
rsync-partial_transfer-finished
rsync-finished
hook-post_rsync-started
hook-post_rsync-finished
finished
)
SNAPSHOT_GROUP=
filter_rsync=()
hook_pre_rsync=()
hook_post_rsync=()
LK_USAGE="\
Usage: ${0##*/} [OPTIONS] SOURCE_NAME SOURCE BACKUP_ROOT [-- RSYNC_ARG...]
Use hard links to duplicate the previous SOURCE_NAME snapshot at BACKUP_ROOT,
then rsync from SOURCE to the replica to create a new snapshot of SOURCE_NAME.
This approach doesn't preserve historical file modes but uses less storage than
rsync --link-dest, which breaks hard links when permissions change.
Custom hook scripts are processed in the following order. Rsync filters are
added in the reverse order.
1. $LK_BASE/etc/backup/<filter-rsync|hook-HOOK>
2. $LK_BASE/etc/backup/<SOURCE_NAME>/<filter-rsync|hook-HOOK>
3. <BACKUP_ROOT>/conf.d/<filter-rsync|hook-HOOK>
4. <BACKUP_ROOT>/conf.d/<SOURCE_NAME>/<filter-rsync|hook-HOOK>
5. command-line
Hook scripts are sourced in a Bash subshell. If they return zero, any output on
file descriptor 8 is eval'd in the global scope of ${0##*/}.
Options:
-g, --group GROUP create snapshot directories with group GROUP
-f, --filter RSYNC_FILTER add filtering rules from file RSYNC_FILTER
-h, --hook HOOK:BASH_SCRIPT register BASH_SCRIPT with HOOK
Sources:
SSH_HOST:SOURCE_PATH
RSYNC_HOST::SOURCE_PATH
SOURCE_PATH
Hooks:
pre_rsync
post_rsync"
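# Illustrative hook (hypothetical): hooks run in a subshell, so to affect this
# script they emit shell commands on file descriptor 8, which are eval'd here
# after the hook returns zero; e.g. a pre_rsync hook could add:
#   echo 'RSYNC_ARGS+=(--exclude "/var/cache/")' >&8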
lk_getopt "g:f:h:" \
"group:,filter:,hook:"
eval "set -- $LK_GETOPT"
while :; do
OPT=$1
shift
case "$OPT" in
-g | --group)
# TODO: add macOS-friendly test
getent group "$1" &>/dev/null ||
lk_die "group not found: $1"
SNAPSHOT_GROUP=$1
shift
;;
-f | --filter)
[ -f "$1" ] || lk_die "file not found: $1"
filter_rsync+=("$1")
shift
;;
-h | --hook)
[[ $1 =~ ^(pre_rsync|post_rsync):(.+)$ ]] ||
lk_die "invalid argument: $1"
HOOK=${BASH_REMATCH[1]}
HOOK_SCRIPT=${BASH_REMATCH[2]}
[ -f "$HOOK_SCRIPT" ] || lk_die "file not found: $HOOK_SCRIPT"
eval "hook_$HOOK+=(\"\$HOOK_SCRIPT\")"
shift
;;
--)
break
;;
esac
done
[ $# -ge 3 ] || lk_usage
SOURCE_NAME=$1
SOURCE=$2
BACKUP_ROOT=$3
shift 3
case "$SOURCE" in
*::*)
RSYNC_HOST=${SOURCE%%::*}
SOURCE_PATH=${SOURCE#*::}
SOURCE_TYPE="rsync"
;;
*:*)
SSH_HOST=${SOURCE%%:*}
SOURCE_PATH=${SOURCE#*:}
SOURCE_TYPE="rsync over SSH"
;;
*)
SOURCE_PATH=$SOURCE
SOURCE_TYPE="filesystem"
;;
esac
[ -d "$BACKUP_ROOT" ] || lk_die "directory not found: $BACKUP_ROOT"
[ -w "$BACKUP_ROOT" ] || lk_die "cannot write to directory: $BACKUP_ROOT"
SOURCE_NAME=${SOURCE_NAME//\//_}
BACKUP_ROOT=$(realpath "$BACKUP_ROOT")
JOB_NAME=${BACKUP_ROOT//\//_}-$SOURCE_NAME
lk_lock LOCK_FILE LOCK_FD "${0##*/}-$JOB_NAME"
FIFO_FILE=$(lk_mktemp_dir)/fifo
mkfifo "$FIFO_FILE"
exec 8<>"$FIFO_FILE"
export TZ=UTC
HN=$(lk_hostname) || HN=localhost
FQDN=$(lk_fqdn) || FQDN=$HN.localdomain
SENDER_NAME="${LK_PATH_PREFIX}backup on $HN"
LK_SNAPSHOT_TIMESTAMP=${LK_BACKUP_TIMESTAMP:-$(date +"%Y-%m-%d-%H%M%S")}
LK_SNAPSHOT_ROOT=$BACKUP_ROOT/snapshot/$SOURCE_NAME
LK_SNAPSHOT=$LK_SNAPSHOT_ROOT/$LK_SNAPSHOT_TIMESTAMP
LK_SNAPSHOT_FS=$LK_SNAPSHOT/fs
LK_SNAPSHOT_DB=$LK_SNAPSHOT/db
LK_BACKUP_MAIL=${LK_BACKUP_MAIL-root}
LK_BACKUP_MAIL_FROM=${LK_BACKUP_MAIL_FROM-"$SENDER_NAME <$USER@$FQDN>"}
LK_BACKUP_MAIL_ERROR_ONLY=${LK_BACKUP_MAIL_ERROR_ONLY-Y}
SOURCE_LATEST=$BACKUP_ROOT/latest/$SOURCE_NAME
SNAPSHOT_LOG_FILE=$LK_SNAPSHOT/log/snapshot.log
RSYNC_OUT_FILE=$LK_SNAPSHOT/log/rsync.log
RSYNC_ERR_FILE=$LK_SNAPSHOT/log/rsync.err.log
[ -d "$SOURCE_LATEST" ] ||
[ ! -d "$LK_SNAPSHOT_ROOT" ] ||
SOURCE_LATEST=$LK_SNAPSHOT_ROOT/$(lk_backup_snapshot_latest "$LK_SNAPSHOT_ROOT") ||
SOURCE_LATEST=
! is_stage_complete finished ||
lk_die "already finalised: $LK_SNAPSHOT"
umask 022
SOURCE_MODE=00700
SNAPSHOT_MODE=00700
LOG_MODE=00600
[ -z "$SNAPSHOT_GROUP" ] || {
SOURCE_MODE=02770
SNAPSHOT_MODE=02750
LOG_MODE=00640
}
install -d -m 00755 "$BACKUP_ROOT"
install -d -m 00751 "$BACKUP_ROOT"/{latest,snapshot}
install -d -m "$SOURCE_MODE" ${SNAPSHOT_GROUP:+-g "$SNAPSHOT_GROUP"} \
"$LK_SNAPSHOT_ROOT"
install -d -m "$SNAPSHOT_MODE" ${SNAPSHOT_GROUP:+-g "$SNAPSHOT_GROUP"} \
"$LK_SNAPSHOT"/{,db,log}
for f in SNAPSHOT_LOG_FILE RSYNC_OUT_FILE RSYNC_ERR_FILE; do
[ -e "${!f}" ] ||
install -m "$LOG_MODE" /dev/null "${!f}"
done
SNAPSHOT_DEVICE=$(df "$LK_SNAPSHOT" | awk 'END {print $1}')
_LK_LOG_CMDLINE=("$0-$JOB_NAME" "${_LK_ARGV[@]}")
_LK_SECONDARY_LOG_FILE=$SNAPSHOT_LOG_FILE \
lk_log_start
RSYNC_STATUS=0
RSYNC_RESULT=
RSYNC_STAGE_SUFFIX=
lk_trap_add EXIT exit_trap
{
lk_console_message "Backing up $SOURCE_NAME to $HN:$BACKUP_ROOT"
lk_console_detail "Source:" "$SOURCE"
lk_console_detail "Destination:" "$BACKUP_ROOT on $FQDN"
lk_console_detail "Transport:" "$SOURCE_TYPE"
lk_console_detail "Snapshot:" "$LK_SNAPSHOT_TIMESTAMP"
lk_console_detail "Status:" "$(get_stage)"
if [ -d "$SOURCE_LATEST/fs" ] &&
! is_stage_complete previous-copy-finished; then
LATEST=$(realpath "$SOURCE_LATEST/fs")
[ "$LATEST" != "$(realpath "$LK_SNAPSHOT_FS")" ] ||
lk_die "latest and pending snapshots cannot be the same"
lk_console_message "Duplicating previous snapshot using hard links"
! is_stage_complete previous-copy-started || {
lk_console_detail "Deleting incomplete replica from previous run"
rm -Rf "$LK_SNAPSHOT_FS"
}
[ ! -e "$LK_SNAPSHOT_FS" ] ||
lk_die "directory already exists: $LK_SNAPSHOT_FS"
lk_console_detail "Snapshot:" "$LATEST"
lk_console_detail "Replica:" "$LK_SNAPSHOT_FS"
mark_stage_complete previous-copy-started
# Prevent unwelcome set-group-ID propagation
install -d -m 00700 "$LK_SNAPSHOT_FS"
# Limit concurrent snapshot duplication to one job per device, otherwise
# jobs started simultaneously will bottleneck each other before any of
# them progress to syncing
lk_lock -f "/tmp/${0##*/}-${SNAPSHOT_DEVICE//\//_}.copy.lock" -w \
COPY_LOCK_FILE COPY_LOCK_FD
gnu_cp -alT "$LATEST" "$LK_SNAPSHOT_FS"
lk_lock_drop COPY_LOCK_FILE COPY_LOCK_FD
mark_stage_complete previous-copy-finished
lk_console_log "Copy complete"
else
mark_stage_complete previous-copy-finished
fi
lk_console_item "Creating snapshot at" "$LK_SNAPSHOT"
lk_console_detail "Log files:" "$(lk_echo_args \
"$SNAPSHOT_LOG_FILE" "$RSYNC_OUT_FILE" "$RSYNC_ERR_FILE")"
RSYNC_ARGS=(-vrlpt --delete --stats "$@")
! RSYNC_FILTERS=($(find_custom filter-rsync | tac)) || {
lk_console_detail "Rsync filter:" \
"$(lk_echo_args "${RSYNC_FILTERS[@]/#/. }")"
RSYNC_ARGS+=(--delete-excluded)
for RSYNC_FILTER in "${RSYNC_FILTERS[@]}"; do
RSYNC_ARGS+=(--filter ". $RSYNC_FILTER")
done
}
run_custom_hook pre_rsync
! lk_in_array --inplace RSYNC_ARGS &&
! lk_in_array --write-devices RSYNC_ARGS ||
lk_die "invalid rsync arguments (--inplace not supported)"
! lk_in_array --dry-run RSYNC_ARGS &&
! lk_in_array -n RSYNC_ARGS || DRY_RUN=1
[ "${DRY_RUN:-0}" -ne 0 ] || mark_stage_complete rsync-started
run_rsync || RSYNC_STATUS=$?
STATUS=$RSYNC_STATUS
case "$STATUS" in
0)
RSYNC_RESULT="completed successfully"
;;
23 | 24)
RSYNC_RESULT="completed with transfer errors"
RSYNC_STAGE_SUFFIX=partial_transfer
STATUS=0
;;
*)
RSYNC_RESULT="failed to complete"
;;
esac
[ "${DRY_RUN:-0}" -ne 0 ] || [ "$STATUS" -ne 0 ] ||
mark_stage_complete \
"rsync${RSYNC_STAGE_SUFFIX:+-$RSYNC_STAGE_SUFFIX}-finished"
lk_console_log "rsync $RSYNC_RESULT (exit status $RSYNC_STATUS)"
run_custom_hook post_rsync
[ "${DRY_RUN:-0}" -ne 0 ] || [ "$STATUS" -ne 0 ] || {
lk_console_message "Updating latest snapshot symlink for $SOURCE_NAME"
ln -sfnv "$LK_SNAPSHOT" "$BACKUP_ROOT/latest/$SOURCE_NAME"
mark_stage_complete finished
}
exit "$STATUS"
}
|
# ex:ts=8 sw=4:
# $OpenBSD: Ustar.pm,v 1.87 2016/04/02 11:07:50 espie Exp $
#
# Copyright (c) 2002-2014 Marc Espie <espie@openbsd.org>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Handle utar archives
use strict;
use warnings;
package OpenBSD::Ustar;
use constant {
FILE => "\0",
FILE1 => '0',
HARDLINK => '1',
SOFTLINK => '2',
CHARDEVICE => '3',
BLOCKDEVICE => '4',
DIR => '5',
FIFO => '6',
CONTFILE => '7',
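	# pack/unpack template for one 512-byte ustar header:
	# name(100) mode(8) uid(8) gid(8) size(12) mtime(12) chksum(8) type(1)
	# linkname(100) magic(6) version(2) uname(32) gname(32) devmajor(8)
	# devminor(8) prefix(155) pad(12)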
USTAR_HEADER => 'a100a8a8a8a12a12a8aa100a6a2a32a32a8a8a155a12',
MAXFILENAME => 100,
MAXLINKNAME => 100,
MAXPREFIX => 155,
MAXUSERNAME => 32,
MAXGROUPNAME => 32,
XHDR => 'x',
# XXX those are NOT supported, just recognized
GHDR => 'g',
LONGLINK => 'K',
LONGNAME => 'L',
};
use File::Basename ();
use OpenBSD::IdCache;
use OpenBSD::Paths;
our $uidcache = new OpenBSD::UidCache;
our $gidcache = new OpenBSD::GidCache;
our $unamecache = new OpenBSD::UnameCache;
our $gnamecache = new OpenBSD::GnameCache;
# This is a multiple of st_blksize everywhere....
my $buffsize = 2 * 1024 * 1024;
sub new
{
my ($class, $fh, $state, $destdir) = @_;
$destdir = '' unless defined $destdir;
return bless {
fh => $fh,
swallow => 0,
state => $state,
key => {},
destdir => $destdir} , $class;
}
sub set_description
{
my ($self, $d) = @_;
$self->{description} = $d;
}
sub set_callback
{
my ($self, $code) = @_;
$self->{callback} = $code;
}
sub fatal
{
my ($self, $msg, @args) = @_;
$self->{state}->fatal("Ustar [#1][#2]: #3",
$self->{description} // '?', $self->{lastname} // '?',
$self->{state}->f($msg, @args));
}
sub new_object
{
my ($self, $h, $class) = @_;
$h->{archive} = $self;
$h->{destdir} = $self->{destdir};
bless $h, $class;
return $h;
}
sub skip
{
my $self = shift;
my $temp;
while ($self->{swallow} > 0) {
my $toread = $self->{swallow};
		if ($toread > $buffsize) {
$toread = $buffsize;
}
my $actual = read($self->{fh}, $temp, $toread);
if (!defined $actual) {
$self->fatal("Error while skipping archive: #1", $!);
}
if ($actual == 0) {
$self->fatal("Premature end of archive in header: #1", $!);
}
$self->{swallow} -= $actual;
}
}
my $types = {
DIR , 'OpenBSD::Ustar::Dir',
HARDLINK , 'OpenBSD::Ustar::HardLink',
SOFTLINK , 'OpenBSD::Ustar::SoftLink',
FILE , 'OpenBSD::Ustar::File',
FILE1 , 'OpenBSD::Ustar::File',
FIFO , 'OpenBSD::Ustar::Fifo',
CHARDEVICE , 'OpenBSD::Ustar::CharDevice',
BLOCKDEVICE , 'OpenBSD::Ustar::BlockDevice',
};
my $unsupported = {
XHDR => 'Extended header',
GHDR => 'GNU header',
LONGLINK => 'Long symlink',
LONGNAME => 'Long file',
};
sub read_records
{
my ($self, $size) = @_;
my $toread = $self->{swallow};
my $result = '';
while ($toread > 0) {
my $buffer;
my $maxread = $buffsize;
$maxread = $toread if $maxread > $toread;
my $actual = read($self->{fh}, $buffer, $maxread);
if (!defined $actual) {
$self->fatal("Error reading from archive: #1", $!);
}
if ($actual == 0) {
$self->fatal("Premature end of archive");
}
$self->{swallow} -= $actual;
$toread -= $actual;
$result .= $buffer;
}
return substr($result, 0, $size);
}
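# pax extended header data is a sequence of "<length> <key>=<value>\n"
# records, where <length> counts the whole record; only path and linkpath
# are extracted here, all other keywords are ignored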
sub parse_records
{
my ($self, $result, $h) = @_;
open(my $fh, '<', \$h);
while (<$fh>) {
chomp;
if (m/^(\d+)\s+(\w+?)\=(.*)$/) {
my ($k, $v) = ($2, $3);
if ($k eq 'path') {
$result->{name} = $v;
} elsif ($k eq 'linkpath') {
$result->{linkname} = $v;
}
}
}
}
sub next
{
my $self = shift;
# get rid of the current object
$self->skip;
my $header;
my $n = read($self->{fh}, $header, 512);
return if (defined $n) and $n == 0;
$self->fatal("Error while reading header")
unless defined $n and $n == 512;
if ($header eq "\0"x512) {
return $self->next;
}
# decode header
my ($name, $mode, $uid, $gid, $size, $mtime, $chksum, $type,
$linkname, $magic, $version, $uname, $gname, $major, $minor,
$prefix, $pad) = unpack(USTAR_HEADER, $header);
if ($magic ne "ustar\0" || $version ne '00') {
$self->fatal("Not an ustar archive header");
}
# verify checksum
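	# the ustar checksum is the unsigned sum of all 512 header bytes,
	# computed as if the 8-byte chksum field itself contained spaces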
my $value = $header;
substr($value, 148, 8) = " "x8;
my $ck2 = unpack("%C*", $value);
if ($ck2 != oct($chksum)) {
$self->fatal("Bad archive checksum");
}
$name =~ s/\0*$//o;
$mode = oct($mode) & 0xfff;
$uname =~ s/\0*$//o;
$gname =~ s/\0*$//o;
$linkname =~ s/\0*$//o;
$major = oct($major);
$minor = oct($minor);
$uid = oct($uid);
$gid = oct($gid);
$uid = $uidcache->lookup($uname, $uid);
$gid = $gidcache->lookup($gname, $gid);
{
no warnings; # XXX perl warns if oct converts >= 2^32 values
$mtime = oct($mtime);
}
unless ($prefix =~ m/^\0/o) {
$prefix =~ s/\0*$//o;
$name = "$prefix/$name";
}
$self->{lastname} = $name;
$size = oct($size);
my $result= {
name => $name,
mode => $mode,
atime => $mtime,
mtime => $mtime,
linkname=> $linkname,
uname => $uname,
uid => $uid,
gname => $gname,
gid => $gid,
size => $size,
major => $major,
minor => $minor,
};
# adjust swallow
$self->{swallow} = $size;
if ($size % 512) {
$self->{swallow} += 512 - $size % 512;
}
if ($type eq XHDR) {
my $h = $self->read_records($size);
$result = $self->next;
$self->parse_records($result, $h);
return $result;
}
if (defined $types->{$type}) {
$self->new_object($result, $types->{$type});
} else {
$self->fatal("Unsupported type #1 (#2)", $type,
$unsupported->{$type} // "unknown");
}
if (!$result->isFile && $result->{size} != 0) {
$self->fatal("Bad archive: non null size for #1 (#2)",
$types->{$type}, $result->{name});
}
$self->{cachename} = $name;
return $result;
}
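# ustar stores long path names in two fields, prefix (up to 155 bytes) and
# name (up to 100 bytes), implicitly joined with '/'; split_name looks for
# a split point on a directory boundary when the full name is too long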
sub split_name
{
my $name = shift;
my $prefix = '';
my $l = length $name;
if ($l > MAXFILENAME && $l <= MAXFILENAME+MAXPREFIX+1) {
while (length($name) > MAXFILENAME &&
$name =~ m/^(.*?\/)(.*)$/o) {
$prefix .= $1;
$name = $2;
}
$prefix =~ s|/$||;
}
return ($prefix, $name);
}
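# build a single pax record: "<length> <key>=<value>\n", where <length> is
# the total record length including its own digits, e.g. ("path",
# "some/long/name") yields " 24 path=some/long/name\n"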
sub extended_record
{
my ($k, $v) = @_;
my $string = " $k=$v\n";
my $len = length($string);
if ($len < 995) {
return sprintf("%3d", $len+3).$string;
} elsif ($len < 9995) {
return sprintf("%04d", $len+4).$string;
} else {
return sprintf("%05d", $len+5).$string;
}
}
sub pack_header
{
my ($archive, $type, $size, $entry, $prefix, $name, $linkname,
$uname, $gname, $major, $minor) = @_;
my $header;
my $cksum = ' 'x8;
for (1 .. 2) {
$header = pack(USTAR_HEADER,
$name,
sprintf("%07o", $entry->{mode}),
sprintf("%07o", $entry->{uid} // 0),
sprintf("%07o", $entry->{gid} // 0),
sprintf("%011o", $size),
sprintf("%011o", $entry->{mtime} // 0),
$cksum,
$type,
$linkname,
'ustar', '00',
$uname,
$gname,
sprintf("%07o", $major),
sprintf("%07o", $minor),
$prefix, "\0");
$cksum = sprintf("%07o", unpack("%C*", $header));
}
return $header;
}
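# placeholder name written into pax extended header entries; the trailing
# digits make each one unique thanks to Perl's magic string increment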
my $whatever = "usual_suspect000";
sub mkheader
{
my ($archive, $entry, $type) = @_;
my ($prefix, $name) = split_name($entry->name);
my ($extendedname, $extendedlink);
my $linkname = $entry->{linkname};
my $size = $entry->{size};
my ($major, $minor);
if ($entry->isDevice) {
$major = $entry->{major};
$minor = $entry->{minor};
} else {
$major = 0;
$minor = 0;
}
my ($uname, $gname);
if (defined $entry->{uname}) {
$uname = $entry->{uname};
} else {
$uname = $entry->{uid};
}
if (defined $entry->{gname}) {
$gname = $entry->{gname};
} else {
$gname = $entry->{gid};
}
if (defined $entry->{cwd}) {
my $cwd = $entry->{cwd};
$cwd.='/' unless $cwd =~ m/\/$/o;
$linkname =~ s/^\Q$cwd\E//;
}
if (!defined $linkname) {
$linkname = '';
}
if (length $prefix > MAXPREFIX) {
$prefix = substr($prefix, 0, MAXPREFIX);
$extendedname = 1;
}
if (length $name > MAXFILENAME) {
		$name = substr($name, 0, MAXFILENAME);
$extendedname = 1;
}
if (length $linkname > MAXLINKNAME) {
$linkname = substr($linkname, 0, MAXLINKNAME);
$extendedlink = 1;
}
if (length $uname > MAXUSERNAME) {
$archive->fatal("Username too long #1", $uname);
}
if (length $gname > MAXGROUPNAME) {
$archive->fatal("Groupname too long #1", $gname);
}
my $header = $archive->pack_header($type, $size, $entry,
$prefix, $name, $linkname, $uname, $gname, $major, $minor);
my $x;
if ($extendedname) {
$x .= extended_record("path", $entry->name);
}
if ($extendedlink) {
$x .= extended_record("linkpath",$entry->{linkname});
}
if ($x) {
my $extended = $archive->pack_header(XHDR, length($x), $entry,
'', $whatever, '', $uname, $gname, $major, $minor);
$whatever++;
if ((length $x) % 512) {
$x .= "\0" x (512 - ((length $x) % 512));
}
return $extended.$x.$header;
}
return $header;
}
sub prepare
{
my ($self, $filename, $destdir) = @_;
$destdir //= $self->{destdir};
my $realname = "$destdir/$filename";
my ($dev, $ino, $mode, $uid, $gid, $rdev, $size, $mtime) =
(lstat $realname)[0,1,2, 4,5,6,7, 9];
my $entry = {
key => "$dev/$ino",
name => $filename,
realname => $realname,
mode => $mode,
uid => $uid,
gid => $gid,
size => $size,
mtime => $mtime,
uname => $unamecache->lookup($uid),
gname => $gnamecache->lookup($gid),
major => $rdev/256,
minor => $rdev%256,
};
my $k = $entry->{key};
my $class = "OpenBSD::Ustar::File"; # default
if (defined $self->{key}{$k}) {
$entry->{linkname} = $self->{key}{$k};
$class = "OpenBSD::Ustar::HardLink";
} elsif (-l $realname) {
$entry->{linkname} = readlink($realname);
$class = "OpenBSD::Ustar::SoftLink";
} elsif (-p _) {
$class = "OpenBSD::Ustar::Fifo";
} elsif (-c _) {
$class = "OpenBSD::Ustar::CharDevice";
} elsif (-b _) {
$class ="OpenBSD::Ustar::BlockDevice";
} elsif (-d _) {
$class = "OpenBSD::Ustar::Dir";
}
$self->new_object($entry, $class);
if (!$entry->isFile) {
$entry->{size} = 0;
}
return $entry;
}
sub pad
{
my $self = shift;
my $fh = $self->{fh};
print $fh "\0"x1024 or $self->fatal("Error writing to archive: #1", $!);
}
sub close
{
my $self = shift;
if (defined $self->{padout}) {
$self->pad;
}
close($self->{fh});
}
sub destdir
{
my $self = shift;
if (@_ > 0) {
$self->{destdir} = shift;
} else {
return $self->{destdir};
}
}
sub fh
{
return $_[0]->{fh};
}
package OpenBSD::Ustar::Object;
sub recheck_owner
{
my $entry = shift;
# XXX weird format to prevent cvs from expanding OpenBSD id
$entry->{uid} //= $OpenBSD::Ustar::uidcache
->lookup($entry->{uname});
$entry->{gid} //= $OpenBSD::Ustar::gidcache
->lookup($entry->{gname});
}
sub fatal
{
my ($self, @args) = @_;
$self->{archive}->fatal(@args);
}
sub system
{
my ($self, @args) = @_;
$self->{archive}{state}->system(@args);
}
sub errsay
{
my ($self, @args) = @_;
$self->{archive}{state}->errsay(@args);
}
sub left_todo
{
my ($self, $toread) = @_;
return if $toread == 0;
return unless defined $self->{archive}{callback};
&{$self->{archive}{callback}}($self->{size} - $toread);
}
sub name
{
my $self = shift;
return $self->{name};
}
sub set_name
{
my ($self, $v) = @_;
$self->{name} = $v;
}
sub set_modes
{
my $self = shift;
chown $self->{uid}, $self->{gid}, $self->{destdir}.$self->name;
chmod $self->{mode}, $self->{destdir}.$self->name;
if (defined $self->{mtime} || defined $self->{atime}) {
utime $self->{atime} // time, $self->{mtime} // time,
$self->{destdir}.$self->name;
}
}
sub ensure_dir
{
my ($self, $dir) = @_;
return if -d $dir;
$self->ensure_dir(File::Basename::dirname($dir));
if (mkdir($dir)) {
return;
}
$self->fatal("Error making directory #1: #2", $dir, $!);
}
sub make_basedir
{
my $self = shift;
my $dir = $self->{destdir}.File::Basename::dirname($self->name);
$self->ensure_dir($dir);
}
sub write
{
my $self = shift;
my $arc = $self->{archive};
my $out = $arc->{fh};
$arc->{padout} = 1;
my $header = $arc->mkheader($self, $self->type);
print $out $header or $self->fatal("Error writing to archive: #1", $!);
$self->write_contents($arc);
my $k = $self->{key};
if (!defined $arc->{key}{$k}) {
$arc->{key}{$k} = $self->name;
}
}
sub alias
{
my ($self, $arc, $alias) = @_;
my $k = $self->{archive}.":".$self->{archive}{cachename};
if (!defined $arc->{key}{$k}) {
$arc->{key}{$k} = $alias;
}
}
sub write_contents
{
# only files have anything to write
}
sub resolve_links
{
# only hard links must cheat
}
sub copy_contents
{
# only files need copying
}
sub copy
{
my ($self, $wrarc) = @_;
my $out = $wrarc->{fh};
$self->resolve_links($wrarc);
$wrarc->{padout} = 1;
my $header = $wrarc->mkheader($self, $self->type);
print $out $header or $self->fatal("Error writing to archive: #1", $!);
$self->copy_contents($wrarc);
}
sub isDir() { 0 }
sub isFile() { 0 }
sub isDevice() { 0 }
sub isFifo() { 0 }
sub isLink() { 0 }
sub isSymLink() { 0 }
sub isHardLink() { 0 }
package OpenBSD::Ustar::Dir;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->ensure_dir($self->{destdir}.$self->name);
$self->set_modes;
}
sub isDir() { 1 }
sub type() { OpenBSD::Ustar::DIR }
package OpenBSD::Ustar::HardLink;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->make_basedir;
my $linkname = $self->{linkname};
if (defined $self->{cwd}) {
$linkname=$self->{cwd}.'/'.$linkname;
}
link $self->{destdir}.$linkname, $self->{destdir}.$self->name or
$self->fatal("Can't link #1#2 to #1#3: #4",
$self->{destdir}, $linkname, $self->name, $!);
}
sub resolve_links
{
my ($self, $arc) = @_;
my $k = $self->{archive}.":".$self->{linkname};
if (defined $arc->{key}{$k}) {
$self->{linkname} = $arc->{key}{$k};
} else {
print join("\n", keys(%{$arc->{key}})), "\n";
$self->fatal("Can't copy link over: original for #1 NOT available", $k);
}
}
sub isLink() { 1 }
sub isHardLink() { 1 }
sub type() { OpenBSD::Ustar::HARDLINK }
package OpenBSD::Ustar::SoftLink;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->make_basedir;
symlink $self->{linkname}, $self->{destdir}.$self->name or
$self->fatal("Can't symlink #1 to #2#3: #4",
$self->{linkname}, $self->{destdir}, $self->name, $!);
require POSIX;
POSIX::lchown($self->{uid}, $self->{gid}, $self->{destdir}.$self->name);
}
sub isLink() { 1 }
sub isSymLink() { 1 }
sub type() { OpenBSD::Ustar::SOFTLINK }
package OpenBSD::Ustar::Fifo;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->make_basedir;
require POSIX;
POSIX::mkfifo($self->{destdir}.$self->name, $self->{mode}) or
$self->fatal("Can't create fifo #1#2: #3", $self->{destdir},
$self->name, $!);
$self->set_modes;
}
sub isFifo() { 1 }
sub type() { OpenBSD::Ustar::FIFO }
package OpenBSD::Ustar::Device;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->make_basedir;
$self->system(OpenBSD::Paths->mknod,
'-m', $self->{mode}, '--', $self->{destdir}.$self->name,
$self->devicetype, $self->{major}, $self->{minor});
$self->set_modes;
}
sub isDevice() { 1 }
package OpenBSD::Ustar::BlockDevice;
our @ISA=qw(OpenBSD::Ustar::Device);
sub type() { OpenBSD::Ustar::BLOCKDEVICE }
sub devicetype() { 'b' }
package OpenBSD::Ustar::CharDevice;
our @ISA=qw(OpenBSD::Ustar::Device);
sub type() { OpenBSD::Ustar::CHARDEVICE }
sub devicetype() { 'c' }
package OpenBSD::CompactWriter;
use constant {
FH => 0,
BS => 1,
ZEROES => 2,
UNFINISHED => 3,
};
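# writes file data while turning runs of all-zero blocks (st_blksize-sized)
# into holes: instead of writing the zeroes it seeks past them, so the file
# can be stored sparsely; close() materialises the final hole, if any, by
# writing a single trailing NUL byte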
sub new
{
my ($class, $fname) = @_;
open (my $out, '>', $fname) or return;
my $bs = (stat $out)[11];
my $zeroes;
if (defined $bs) {
$zeroes = "\x00"x$bs;
}
bless [ $out, $bs, $zeroes, 0 ], $class;
}
sub write
{
my ($self, $buffer) = @_;
my ($fh, $bs, $zeroes, $e) = @$self;
START:
if (defined $bs) {
for (my $i = 0; $i + $bs <= length($buffer); $i+= $bs) {
if (substr($buffer, $i, $bs) eq $zeroes) {
my $r = syswrite($fh, $buffer, $i);
unless (defined $r && $r == $i) {
return 0;
}
$i+=$bs;
my $seek_forward = $bs;
while (substr($buffer, $i, $bs) eq $zeroes) {
$i += $bs;
$seek_forward += $bs;
}
defined(sysseek($fh, $seek_forward, 1))
or return 0;
$buffer = substr($buffer, $i);
if (length $buffer == 0) {
$self->[UNFINISHED] = 1;
return 1;
}
goto START;
}
}
}
$self->[UNFINISHED] = 0;
my $r = syswrite($fh, $buffer);
if (defined $r && $r == length $buffer) {
return 1;
} else {
return 0;
}
}
sub close
{
my ($self) = @_;
if ($self->[UNFINISHED]) {
defined(sysseek($self->[FH], -1, 1)) or return 0;
defined(syswrite($self->[FH], "\0")) or return 0;
}
return 1;
}
package OpenBSD::Ustar::File;
our @ISA=qw(OpenBSD::Ustar::Object);
sub create
{
my $self = shift;
$self->make_basedir;
my $buffer;
my $out = OpenBSD::CompactWriter->new($self->{destdir}.$self->name);
if (!defined $out) {
$self->fatal("Can't write to #1#2: #3", $self->{destdir},
$self->name, $!);
}
my $toread = $self->{size};
if ($self->{partial}) {
$toread -= length($self->{partial});
unless ($out->write($self->{partial})) {
$self->fatal("Error writing to #1#2: #3",
$self->{destdir}, $self->name, $!);
}
}
while ($toread > 0) {
my $maxread = $buffsize;
$maxread = $toread if $maxread > $toread;
my $actual = read($self->{archive}{fh}, $buffer, $maxread);
if (!defined $actual) {
$self->fatal("Error reading from archive: #1", $!);
}
if ($actual == 0) {
$self->fatal("Premature end of archive");
}
$self->{archive}{swallow} -= $actual;
unless ($out->write($buffer)) {
$self->fatal("Error writing to #1#2: #3",
$self->{destdir}, $self->name, $!);
}
$toread -= $actual;
$self->left_todo($toread);
}
$out->close or $self->fatal("Error closing #1#2: #3",
$self->{destdir}, $self->name, $!);
$self->set_modes;
}
sub contents
{
my ($self, $lookfor) = @_;
my $toread = $self->{size};
my $buffer;
my $offset = 0;
if ($self->{partial}) {
$buffer = $self->{partial};
$offset = length($self->{partial});
$toread -= $offset;
}
while ($toread != 0) {
my $sz = $toread;
if (defined $lookfor) {
last if (defined $buffer) and &$lookfor($buffer);
$sz = 1024 if $sz > 1024;
}
my $actual = read($self->{archive}{fh}, $buffer, $sz, $offset);
if (!defined $actual) {
$self->fatal("Error reading from archive: #1", $!);
}
if ($actual != $sz) {
$self->fatal("Error: short read from archive");
}
$self->{archive}{swallow} -= $actual;
$toread -= $actual;
$offset += $actual;
}
$self->{partial} = $buffer;
return $buffer;
}
sub write_contents
{
my ($self, $arc) = @_;
my $filename = $self->{realname};
my $size = $self->{size};
my $out = $arc->{fh};
open my $fh, "<", $filename or $self->fatal("Can't read file #1: #2",
$filename, $!);
my $buffer;
my $toread = $size;
while ($toread > 0) {
my $maxread = $buffsize;
$maxread = $toread if $maxread > $toread;
my $actual = read($fh, $buffer, $maxread);
if (!defined $actual) {
$self->fatal("Error reading from file: #1", $!);
}
if ($actual == 0) {
$self->fatal("Premature end of file");
}
unless (print $out $buffer) {
$self->fatal("Error writing to archive: #1", $!);
}
$toread -= $actual;
$self->left_todo($toread);
}
if ($size % 512) {
print $out "\0" x (512 - $size % 512) or
$self->fatal("Error writing to archive: #1", $!);
}
}
sub copy_contents
{
my ($self, $arc) = @_;
my $out = $arc->{fh};
my $buffer;
my $size = $self->{size};
my $toread = $size;
while ($toread > 0) {
my $maxread = $buffsize;
$maxread = $toread if $maxread > $toread;
my $actual = read($self->{archive}{fh}, $buffer, $maxread);
if (!defined $actual) {
$self->fatal("Error reading from archive: #1", $!);
}
if ($actual == 0) {
$self->fatal("Premature end of archive");
}
$self->{archive}{swallow} -= $actual;
unless (print $out $buffer) {
$self->fatal("Error writing to archive #1", $!);
}
$toread -= $actual;
}
if ($size % 512) {
print $out "\0" x (512 - $size % 512) or
$self->fatal("Error writing to archive: #1", $!);
}
$self->alias($arc, $self->name);
}
sub isFile() { 1 }
sub type() { OpenBSD::Ustar::FILE1 }
1;
|
package mario
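// maxTurbulenceSize returns the length of the longest turbulent subarray,
// i.e. one whose consecutive differences strictly alternate in sign.
// Example: maxTurbulenceSize([]int{9, 4, 2, 10, 7, 8, 8, 1, 9}) == 5
// (the turbulent run is 4, 2, 10, 7, 8).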
func maxTurbulenceSize(arr []int) int {
if len(arr) < 2 {
return len(arr)
}
maxLength := 1
{
lastTrend := arr[1] - arr[0]
var length int
if lastTrend == 0 {
length = 1
} else {
length = 2
}
for i := 2; i < len(arr); i++ {
currTrend := arr[i] - arr[i-1]
if isOpposite(lastTrend, currTrend) {
lastTrend = currTrend
length++
continue
}
if length > maxLength {
maxLength = length
}
if currTrend == 0 {
length = 1
} else {
				length = 2 // the initial lastTrend == 0 case and later zero trends are not handled the same way, so the scan starts at i = 2
}
lastTrend = currTrend
}
if length > maxLength {
maxLength = length
}
}
return maxLength
}
func isOpposite(a, b int) bool {
return (a > 0 && b < 0) || (a < 0 && b > 0)
}
|
package com.example.timelineview
import android.view.View
import androidx.annotation.DrawableRes
import androidx.annotation.StringRes
import androidx.appcompat.widget.AppCompatTextView
import androidx.recyclerview.widget.RecyclerView
import com.tcqq.timelineview.TimelineView
import eu.davidea.flexibleadapter.FlexibleAdapter
import eu.davidea.flexibleadapter.items.AbstractFlexibleItem
import eu.davidea.flexibleadapter.items.IFlexible
import eu.davidea.viewholders.FlexibleViewHolder
/**
* @author Perry Lance
* @since 2019-03-26 Created
*/
data class MilestoneCompareItem(
val id: String,
val title: String,
val status: MilestoneCompareStatus
) : AbstractFlexibleItem<MilestoneCompareItem.ViewHolder>() {
override fun getLayoutRes(): Int {
return R.layout.item_milestone_compare
}
override fun createViewHolder(
view: View,
adapter: FlexibleAdapter<IFlexible<RecyclerView.ViewHolder>>
): ViewHolder {
return ViewHolder(view, adapter)
}
override fun bindViewHolder(
adapter: FlexibleAdapter<IFlexible<RecyclerView.ViewHolder>>,
holder: ViewHolder,
position: Int,
payloads: MutableList<Any>
) {
holder.title.text = title
holder.timeline.initLine(TimelineView.getTimeLineViewType(position, adapter.itemCount))
when (status) {
MilestoneCompareStatus.COMPLETED -> setStatus(
holder.timeline,
R.drawable.ic_check_circle_black_24dp,
holder.status,
R.string.completed
)
MilestoneCompareStatus.REQUEST_TO_MODIFY_THE_CONTRACT -> setStatus(
holder.timeline,
R.drawable.ic_radio_button_checked_black_24dp,
holder.status,
R.string.request_to_modify_the_contract
)
MilestoneCompareStatus.INACTIVE -> setStatus(
holder.timeline,
R.drawable.ic_radio_button_unchecked_black_24dp,
holder.status,
R.string.inactive
)
}
}
private fun setStatus(
timeline: TimelineView, @DrawableRes iconRes: Int,
statusTextView: AppCompatTextView, @StringRes statusTextRes: Int
) {
val context = timeline.context
statusTextView.text = context.getString(statusTextRes)
timeline.marker = VectorDrawableUtils.getDrawable(
context,
iconRes,
ThemeUtils.getThemeValue(R.attr.colorSecondary, context)
)
}
class ViewHolder(view: View, adapter: FlexibleAdapter<*>) : FlexibleViewHolder(view, adapter) {
var timeline: TimelineView = view.findViewById(R.id.timeline)
var title: AppCompatTextView = view.findViewById(R.id.title)
var status: AppCompatTextView = view.findViewById(R.id.status)
}
}
|
(ns girouette.grammar.hiccup-tag-test
(:require [clojure.test :refer [deftest testing is are]]
[girouette.grammar.hiccup-tag :refer [hiccup-tag-parser]]))
(deftest parser-test
(are [kw expected-parsed-data]
(= expected-parsed-data (hiccup-tag-parser (name kw)))
:div
[:hiccup-tag [:html-tag "div"]]
:div#app.foo
[:hiccup-tag [:html-tag "div"] [:id "app"] [:class-name "foo"]]
:div.foo#here
[:hiccup-tag [:html-tag "div"] [:class-name "foo"] [:id "here"]]
:div#app.foo#here.bar
[:hiccup-tag [:html-tag "div"] [:id "app"] [:class-name "foo"] [:id "here"] [:class-name "bar"]]))
|
<?php
namespace App\User\Infraestructure\Command;
use App\Shared\Domain\User\UserId;
use App\User\Application\Create\UserCreator;
use App\User\Domain\UserName;
use App\User\Domain\UserPassword;
use App\User\Domain\UserRoles;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Question\Question;
use Symfony\Component\Console\Style\SymfonyStyle;
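/**
 * Interactive console command that creates a user. With a standard Symfony
 * setup it would typically be run as `php bin/console app:create-user`
 * (the `bin/console` entry point is assumed and not part of this file).
 */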
class CreateUserCommand extends Command {
protected static $defaultName = 'app:create-user';
private $userCreator;
public function __construct(UserCreator $userCreator) {
parent::__construct();
$this->userCreator = $userCreator;
}
protected function configure() {
$this
->setDescription('Create user');
}
private function userRoles(InputInterface $input, OutputInterface $output) {
$io = new SymfonyStyle($input, $output);
$helper = $this->getHelper('question');
$roles = [];
        $question = new Question('Add a role to this user? (y/n):');
$addRoles = $helper->ask($input, $output, $question);
if ($addRoles == 'y' || $addRoles == 'yes') {
do {
$question = new Question('Role name:');
$role = $helper->ask($input, $output, $question);
$availableRoles = UserRoles::getAvailableRoles();
if (!in_array($role, $availableRoles)) {
$io->error(
sprintf(
'The role must be one of %s',
implode(', ', $availableRoles)
)
);
continue;
}
$roles[] = $role;
                $question = new Question('Add another role to this user? (y/n):');
$addRoles = $helper->ask($input, $output, $question);
} while ($addRoles == 'y' || $addRoles == 'yes');
}
return $roles;
}
protected function execute(InputInterface $input, OutputInterface $output): int {
$io = new SymfonyStyle($input, $output);
$helper = $this->getHelper('question');
$question = new Question('Username:');
$username = $helper->ask($input, $output, $question);
if (!$username) {
throw new \Exception('No username provided!');
}
$question = new Question('Password:');
$question->setHidden(true);
$question->setHiddenFallback(false);
$plainPassword = $helper->ask($input, $output, $question);
if (!$plainPassword) {
throw new \Exception('No password provided!');
}
$roles = $this->userRoles($input, $output);
$user = $this->userCreator->__invoke(
UserId::create(),
new UserName($username),
new UserPassword($plainPassword),
new UserRoles($roles)
);
$output->writeln('Username: ' . $user->getUsername());
$output->writeln('Password: Your password');
$output->writeln('Roles: ' . implode(', ', $user->getRoles()));
$io->success('User created!');
return Command::SUCCESS;
}
}
|