_id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 classes) | text (string, 66-10.5k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q26500 | Barby.Code93.c_checksum | test | def c_checksum
sum = 0
checksum_values.each_with_index do |value, index|
sum += ((index % 20) + 1) * value
end
sum % 47
end | ruby | {
"resource": ""
} |
q26501 | Barby.Code93.k_checksum | test | def k_checksum
sum = 0
checksum_values_with_c_checksum.each_with_index do |value, index|
sum += ((index % 15) + 1) * value
end
sum % 47
end | ruby | {
"resource": ""
} |
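Note: both Code 93 check characters use the same weighted-sum-mod-47 scheme, with weights cycling 1..20 (C) and 1..15 (K), the K checksum also covering the C check character. A minimal standalone sketch of the arithmetic, using a hypothetical values array in place of Barby's checksum_values:

values = [25, 12, 4, 19]  # hypothetical per-character values
c = values.each_with_index.sum { |v, i| ((i % 20) + 1) * v } % 47              #=> 43
k = (values + [c]).each_with_index.sum { |v, i| ((i % 15) + 1) * v } % 47      #=> 23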
q26502 | Barby.PDFWriterOutputter.annotate_pdf | test | def annotate_pdf(pdf, options={})
with_options options do
xpos, ypos = x, y
orig_xpos = xpos
if barcode.two_dimensional?
boolean_groups.reverse_each do |groups|
groups.each do |bar,amount|
if bar
pdf.move_to(xpos, ypos).
line_to(xpos, ypos+xdim).
line_to(xpos+(xdim*amount), ypos+xdim).
line_to(xpos+(xdim*amount), ypos).
line_to(xpos, ypos).
fill
end
xpos += (xdim*amount)
end
xpos = orig_xpos
ypos += xdim
end
else
boolean_groups.each do |bar,amount|
if bar
pdf.move_to(xpos, ypos).
line_to(xpos, ypos+height).
line_to(xpos+(xdim*amount), ypos+height).
line_to(xpos+(xdim*amount), ypos).
line_to(xpos, ypos).
fill
end
xpos += (xdim*amount)
end
end
end
pdf
end | ruby | {
"resource": ""
} |
q26503 | Barby.Code39.characters | test | def characters
chars = raw_characters
extended ? chars.map{|c| EXTENDED_ENCODINGS[c].split(//) }.flatten : chars
end | ruby | {
"resource": ""
} |
q26504 | Barby.Code128.characters | test | def characters
chars = data.split(//n)
if type == 'C'
result = []
count = 0
while count < chars.size
if chars[count] =~ /^\d$/
#If encountering a digit, the next char/byte *must* be the second digit of a pair. I.e. if chars[count] is 5,
#chars[count+1] must match /[0-9]/, otherwise the data is not valid
result << "#{chars[count]}#{chars[count+1]}"
count += 2
else
result << chars[count]
count += 1
end
end
result
else
chars
end
end | ruby | {
"resource": ""
} |
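Note: under type C the splitter pairs digits, since Code 128 C encodes two digits per symbol. A standalone sketch of the same loop on hypothetical input (a lone trailing digit would make the data invalid):

chars  = '123456'.split(//)
result = []
count  = 0
while count < chars.size
  if chars[count] =~ /^\d$/
    result << "#{chars[count]}#{chars[count + 1]}"  # digits always come in pairs
    count += 2
  else
    result << chars[count]
    count += 1
  end
end
result  #=> ["12", "34", "56"]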
q26505 | Barby.Code128.checksum | test | def checksum
pos = 0
(numbers+extra_numbers).inject(start_num) do |sum,number|
pos += 1
sum + (number * pos)
end % 103
end | ruby | {
"resource": ""
} |
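Note: the Code 128 checksum is the position-weighted sum of all symbol values, seeded with the start code's value and reduced mod 103. A worked example with the standard values for START B (104) and the letters 'A' (33) and 'B' (34) in code set B:

start_num = 104
numbers   = [33, 34]
pos = 0
numbers.inject(start_num) { |sum, n| pos += 1; sum + n * pos } % 103
#=> (104 + 33*1 + 34*2) % 103 = 205 % 103 = 102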
q26506 | Barby.Code25.encoding_for_bars | test | def encoding_for_bars(*bars)
wide, narrow, space = wide_encoding, narrow_encoding, space_encoding
bars.flatten.inject '' do |enc,bar|
enc + (bar == WIDE ? wide : narrow) + space
end
end | ruby | {
"resource": ""
} |
q26507 | Barby.CairoOutputter.render_to_cairo_context | test | def render_to_cairo_context(context, options={})
if context.respond_to?(:have_current_point?) and
context.have_current_point?
current_x, current_y = context.current_point
else
current_x = x(options) || margin(options)
current_y = y(options) || margin(options)
end
_xdim = xdim(options)
_height = height(options)
original_current_x = current_x
context.save do
context.set_source_color(:black)
context.fill do
if barcode.two_dimensional?
boolean_groups.each do |groups|
groups.each do |bar,amount|
current_width = _xdim * amount
if bar
context.rectangle(current_x, current_y, current_width, _xdim)
end
current_x += current_width
end
current_x = original_current_x
current_y += _xdim
end
else
boolean_groups.each do |bar,amount|
current_width = _xdim * amount
if bar
context.rectangle(current_x, current_y, current_width, _height)
end
current_x += current_width
end
end
end
end
context
end | ruby | {
"resource": ""
} |
q26508 | Barby.CairoOutputter.to_png | test | def to_png(options={})
output_to_string_io do |io|
Cairo::ImageSurface.new(options[:format],
full_width(options),
full_height(options)) do |surface|
render(surface, options)
surface.write_to_png(io)
end
end
end | ruby | {
"resource": ""
} |
q26509 | Barby.CairoOutputter.to_ps | test | def to_ps(options={})
output_to_string_io do |io|
Cairo::PSSurface.new(io,
full_width(options),
full_height(options)) do |surface|
surface.eps = options[:eps] if surface.respond_to?(:eps=)
render(surface, options)
end
end
end | ruby | {
"resource": ""
} |
q26510 | Barby.CairoOutputter.to_pdf | test | def to_pdf(options={})
output_to_string_io do |io|
Cairo::PDFSurface.new(io,
full_width(options),
full_height(options)) do |surface|
render(surface, options)
end
end
end | ruby | {
"resource": ""
} |
q26511 | Barby.CairoOutputter.to_svg | test | def to_svg(options={})
output_to_string_io do |io|
Cairo::SVGSurface.new(io,
full_width(options),
full_height(options)) do |surface|
render(surface, options)
end
end
end | ruby | {
"resource": ""
} |
q26512 | CMSScanner.Browser.max_threads= | test | def max_threads=(number)
@max_threads = number.to_i.positive? && throttle.zero? ? number.to_i : 1
hydra.max_concurrency = @max_threads
end | ruby | {
"resource": ""
} |
q26513 | CMSScanner.WebSite.online? | test | def online?(path = nil)
NS::Browser.get(url(path)).code.nonzero? ? true : false
end | ruby | {
"resource": ""
} |
q26514 | CMSScanner.WebSite.head_and_get | test | def head_and_get(path, codes = [200], params = {})
url_to_get = url(path)
head_params = (params[:head] || {}).merge(head_or_get_params)
head_res = NS::Browser.forge_request(url_to_get, head_params).run
codes.include?(head_res.code) ? NS::Browser.get(url_to_get, params[:get] || {}) : head_res
end | ruby | {
"resource": ""
} |
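Note: a hedged usage sketch (site stands for any CMSScanner::WebSite instance; the path and accepted codes are hypothetical). The HEAD request fires first, and the full GET is only issued when the HEAD status is in the accepted list:

res = site.head_and_get('readme.html', [200, 301])
puts res.code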
q26515 | GHTorrent.Mirror.db | test | def db
return @db unless @db.nil?
Sequel.single_threaded = true
@db = Sequel.connect(config(:sql_url), :encoding => 'utf8')
#@db.loggers << Logger.new(STDOUT)
if @db.tables.empty?
dir = File.join(File.dirname(__FILE__), 'migrations')
puts "Database empty, running migrations from #{dir}"
Sequel.extension :migration
Sequel::Migrator.apply(@db, dir)
end
@db
end | ruby | {
"resource": ""
} |
q26516 | GHTorrent.Mirror.ensure_commit | test | def ensure_commit(repo, sha, user, comments = true)
ensure_repo(user, repo)
c = retrieve_commit(repo, sha, user)
if c.nil?
warn "Commit #{user}/#{repo} -> #{sha} does not exist"
return
end
stored = store_commit(c, repo, user)
ensure_parents(c)
if not c['commit']['comment_count'].nil? \
and c['commit']['comment_count'] > 0
ensure_commit_comments(user, repo, sha) if comments
end
ensure_repo_commit(user, repo, sha)
stored
end | ruby | {
"resource": ""
} |
q26517 | GHTorrent.Mirror.ensure_parents | test | def ensure_parents(commit)
commits = db[:commits]
parents = db[:commit_parents]
commit['parents'].map do |p|
save do
url = p['url'].split(/\//)
this = commits.first(:sha => commit['sha'])
parent = commits.first(:sha => url[7])
if parent.nil?
c = retrieve_commit(url[5], url[7], url[4])
if c.nil?
warn "Could not retrieve commit_parent #{url[4]}/#{url[5]} -> #{url[7]} to #{this[:sha]}"
next
end
parent = store_commit(c, url[5], url[4])
end
if parent.nil?
warn "Could not find #{url[4]}/#{url[5]} -> #{url[7]}, parent to commit #{this[:sha]}"
next
end
if parents.first(:commit_id => this[:id],
:parent_id => parent[:id]).nil?
parents.insert(:commit_id => this[:id],
:parent_id => parent[:id])
info "Added commit_parent #{parent[:sha]} to commit #{this[:sha]}"
else
debug "Parent #{parent[:sha]} for commit #{this[:sha]} exists"
end
parents.first(:commit_id => this[:id], :parent_id => parent[:id])
end
end.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26518 | GHTorrent.Mirror.ensure_user_followers | test | def ensure_user_followers(followed)
curuser = ensure_user(followed, false, false)
followers = db.from(:followers, :users).\
where(Sequel.qualify('followers', 'follower_id') => Sequel.qualify('users', 'id')).\
where(Sequel.qualify('followers', 'user_id') => curuser[:id]).select(:login).all
retrieve_user_followers(followed).reduce([]) do |acc, x|
if followers.find {|y| y[:login] == x['login']}.nil?
acc << x
else
acc
end
end.map { |x| save{ensure_user_follower(followed, x['login']) }}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26519 | GHTorrent.Mirror.ensure_user_follower | test | def ensure_user_follower(followed, follower, date_added = nil)
follower_user = ensure_user(follower, false, false)
followed_user = ensure_user(followed, false, false)
if followed_user.nil? or follower_user.nil?
warn "Could not find follower #{follower} or user #{followed}"
return
end
followers = db[:followers]
follower_id = follower_user[:id]
followed_id = followed_user[:id]
follower_exists = followers.first(:user_id => followed_id,
:follower_id => follower_id)
if follower_exists.nil?
added = if date_added.nil?
max(follower_user[:created_at], followed_user[:created_at])
else
date_added
end
retrieved = retrieve_user_follower(followed, follower)
if retrieved.nil?
warn "Could not retrieve follower #{follower} for #{followed}"
return
end
followers.insert(:user_id => followed_id,
:follower_id => follower_id,
:created_at => added)
info "Added follower #{follower} to #{followed}"
else
debug "Follower #{follower} for user #{followed} exists"
end
unless date_added.nil?
followers.filter(:user_id => followed_id, :follower_id => follower_id)
.update(:created_at => date(date_added))
info "Updated follower #{followed} -> #{follower}, created_at -> #{date(date_added)}"
end
followers.first(:user_id => followed_id, :follower_id => follower_id)
end | ruby | {
"resource": ""
} |
q26520 | GHTorrent.Mirror.ensure_user_byemail | test | def ensure_user_byemail(email, name)
users = db[:users]
usr = users.first(:email => email)
if usr.nil?
u = retrieve_user_byemail(email, name)
if u.nil? or u['login'].nil?
warn "Could not retrieve user #{email} through search API query"
login = (0...8).map { 65.+(rand(25)).chr }.join
users.insert(:email => email,
:name => name,
:login => login,
:fake => true,
:deleted => false,
:created_at => Time.now)
info "Added user fake #{login} -> #{email}"
users.first(:login => login)
else
in_db = users.first(:login => u['login'])
geo = geolocate(location: u['location'])
if in_db.nil?
users.insert(:login => u['login'],
:name => u['name'],
:company => u['company'],
:email => u['email'],
:long => geo[:long],
:lat => geo[:lat],
:country_code => geo[:country_code],
:state => geo[:state],
:city => geo[:city],
:fake => false,
:deleted => false,
:created_at => date(u['created_at']))
info "Added user #{u['login']} (#{email}) through search API query"
else
in_db.update(:name => u['name'],
:company => u['company'],
:email => u['email'],
:long => geo[:long],
:lat => geo[:lat],
:country_code => geo[:country_code],
:state => geo[:state],
:city => geo[:city],
:fake => false,
:deleted => false,
:created_at => date(u['created_at']))
debug "User #{u['login']} with email #{email} exists"
end
users.first(:login => u['login'])
end
else
debug "User with email #{email} exists"
usr
end
end | ruby | {
"resource": ""
} |
q26521 | GHTorrent.Mirror.ensure_repo | test | def ensure_repo(user, repo, recursive = false)
repos = db[:projects]
curuser = ensure_user(user, false, false)
if curuser.nil?
warn "Could not find user #{user}"
return
end
currepo = repos.first(:owner_id => curuser[:id], :name => repo)
unless currepo.nil?
debug "Repo #{user}/#{repo} exists"
return refresh_repo(user, repo, currepo)
end
r = retrieve_repo(user, repo, true)
if r.nil?
warn "Could not retrieve repo #{user}/#{repo}"
return
end
if r['owner']['login'] != curuser[:login]
info "Repo changed owner from #{curuser[:login]} to #{r['owner']['login']}"
curuser = ensure_user(r['owner']['login'], false, false)
end
repos.insert(:url => r['url'],
:owner_id => curuser[:id],
:name => r['name'],
:description => unless r['description'].nil? then r['description'][0..254] else nil end,
:language => r['language'],
:created_at => date(r['created_at']),
:updated_at => date(Time.now),
:etag => unless r['etag'].nil? then r['etag'] end)
unless r['parent'].nil?
parent_owner = r['parent']['owner']['login']
parent_repo = r['parent']['name']
parent = ensure_repo(parent_owner, parent_repo)
if parent.nil?
warn "Could not find repo #{parent_owner}/#{parent_repo}, parent of: #{user}/#{repo}"
repos.filter(:owner_id => curuser[:id], :name => repo).update(:forked_from => -1)
else
repos.filter(:owner_id => curuser[:id], :name => repo).update(:forked_from => parent[:id])
info "Repo #{user}/#{repo} is a fork of #{parent_owner}/#{parent_repo}"
if ensure_fork_point(user, repo).nil?
warn "Could not find fork point for #{user}/#{repo}, fork of #{parent_owner}/#{parent_repo}"
end
end
end
if recursive and not ensure_repo_recursive(user, repo)
warn "Could retrieve #{user}/#{repo} recursively"
return nil
end
info "Added repo #{user}/#{repo}"
return repos.first(:owner_id => curuser[:id], :name => repo)
end | ruby | {
"resource": ""
} |
q26522 | GHTorrent.Mirror.ensure_languages | test | def ensure_languages(owner, repo)
currepo = ensure_repo(owner, repo)
langs = retrieve_languages(owner, repo)
if langs.nil? or langs.empty?
warn "Could not find languages for repo #{owner}/#{repo}"
return
end
ts = Time.now
langs.keys.each do |lang|
db[:project_languages].insert(
:project_id => currepo[:id],
:language => lang.downcase,
:bytes => langs[lang],
:created_at => ts
)
info "Added project_language #{owner}/#{repo} -> #{lang} (#{langs[lang]} bytes)"
end
db[:project_languages].where(:project_id => currepo[:id]).where(:created_at => ts).all
end | ruby | {
"resource": ""
} |
q26523 | GHTorrent.Mirror.ensure_fork_commits | test | def ensure_fork_commits(owner, repo, parent_owner, parent_repo)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repo #{owner}/#{repo}"
return
end
parent = ensure_repo(parent_owner, parent_repo)
if parent.nil?
warn "Could not find repo #{parent_owner}/#{parent_repo}, parent of #{owner}/#{repo}"
return
end
strategy = case
when config(:fork_commits).match(/all/i)
:all
when config(:fork_commits).match(/fork_point/i)
:fork_point
when config(:fork_commits).match(/none/i)
:none
else
:fork_point
end
fork_commit = ensure_fork_point(owner, repo)
if fork_commit.nil? or fork_commit.empty?
warn "Could not find fork commit for repo #{owner}/#{repo}. Retrieving all commits."
return ensure_commits(owner, repo, fork_all: true)
end
debug "Retrieving commits for fork #{owner}/#{repo}: strategy is #{strategy}"
return if strategy == :none
if strategy == :fork_point
# Retrieve commits up to the fork point (fork_point strategy)
info "Retrieving commits for #{owner}/#{repo} until fork commit #{fork_commit[:sha]}"
master_branch = retrieve_default_branch(parent_owner, parent_repo)
return if master_branch.nil?
sha = master_branch
found = false
while not found
commits = retrieve_commits(repo, sha, owner, 1)
# This means that we retrieved no commits
if commits.size == 0
break
end
# This means we retrieved the last page again
if commits.size == 1 and commits[0]['sha'] == sha
break
end
for c in commits
ensure_commit(repo, c['sha'], owner)
sha = c['sha']
if c['sha'] == fork_commit[:sha]
found = true
break
end
end
end
end
if strategy == :all
shared_commit = db[:commits].first(:sha => fork_commit[:sha])
copied = 0
to_copy = db.from(:project_commits, :commits).\
where(Sequel.qualify('project_commits', 'commit_id') => Sequel.qualify('commits', 'id')).\
where(Sequel.qualify('project_commits', 'project_id') => parent[:id]).\
where('commits.created_at < ?', shared_commit[:created_at]).\
select(Sequel.qualify('commits','id'))
to_copy.each do |c|
copied += 1
begin
db[:project_commits].insert(
:project_id => currepo[:id],
:commit_id => c[:id]
)
debug "Copied commit #{c[:sha]} #{parent_owner}/#{parent_repo} -> #{owner}/#{repo} (#{copied} total)"
rescue StandardError => e
warn "Could not copy commit #{c[:sha]} #{parent_owner}/#{parent_repo} -> #{owner}/#{repo} : #{e.message}"
end
end
info "Finished copying commits from #{parent_owner}/#{parent_repo} -> #{owner}/#{repo}: #{copied} total"
end
end | ruby | {
"resource": ""
} |
q26524 | GHTorrent.Mirror.ensure_fork_point | test | def ensure_fork_point(owner, repo)
fork = ensure_repo(owner, repo, false)
if fork[:forked_from].nil?
warn "Repo #{owner}/#{repo} is not a fork"
return nil
end
# Return commit if already specified
unless fork[:forked_commit_id].nil?
commit = db[:commits].where(:id => fork[:forked_commit_id]).first
return commit unless commit.nil?
end
parent = db.from(:projects, :users).\
where(Sequel.qualify('projects', 'owner_id') => Sequel.qualify('users', 'id')).\
where(Sequel.qualify('projects', 'id') => fork[:forked_from]).\
select(Sequel.qualify('users', 'login'), Sequel.qualify('projects','name')).first
if parent.nil?
warn "Unknown parent for repo #{owner}/#{repo}"
return nil
end
default_branch = retrieve_default_branch(parent[:login], parent[:name])
# Retrieve diff between parent and fork master branch
diff = retrieve_master_branch_diff(owner, repo, default_branch, parent[:login], parent[:name], default_branch)
if diff.nil? or diff.empty?
# Try a bit harder by refreshing the default branch
default_branch = retrieve_default_branch(parent[:login], parent[:name], true)
diff = retrieve_master_branch_diff(owner, repo, default_branch, parent[:login], parent[:name], default_branch)
end
if diff.nil? or diff.empty?
# This means that there are no common ancestors between the repos
# This can apparently happen when the parent repo was renamed or force-pushed
# example: https://github.com/openzipkin/zipkin/compare/master...aa1wi:master
warn "No common ancestor between #{parent[:login]}/#{parent[:name]} and #{owner}/#{repo}"
return nil
else
debug "Fork #{owner}/#{repo} is #{diff['ahead_by']} commits ahead and #{diff['behind_by']} commits behind #{parent[:login]}/#{parent[:name]}"
end
if diff['ahead_by'].to_i > 0
# This means that the fork has diverged, and we need to search through the fork
# commit graph for the earliest commit that is shared with the parent. GitHub's
# diff contains a list of divergent commits. We are sorting those by date
# and select the earliest one. We do date sort instead of graph walking as this
# would be prohibitively slow if the commits for the parent did not exist.
earliest_diverging = diff['commits'].sort_by{|x| x['commit']['author']['date']}.first
if earliest_diverging['parents'].nil?
# This means that the repo was forked from the parent repo's initial commit; thus, they both share an initial commit.
# example: https://api.github.com/repos/btakita/pain-point/compare/master...spent:master
likely_fork_point = ensure_commit(parent[:name], earliest_diverging['sha'], parent[:login])
else
# Make sure that all likely fork points exist for the parent project
# and select the latest of them.
# https://github.com/gousiosg/github-mirror/compare/master...pombredanne:master
likely_fork_point = earliest_diverging['parents'].\
map{ |x| ensure_commit(parent[:name], x['sha'], parent[:login])}.\
select{|x| !x.nil?}.\
sort_by { |x| x[:created_at]}.\
last
end
forked_sha = likely_fork_point[:sha]
else
# This means that the fork has not diverged.
forked_sha = diff['merge_base_commit']['sha']
end
forked_commit = ensure_commit(repo, forked_sha, owner)
debug "Fork commit for #{owner}/#{repo} is #{forked_sha}"
unless forked_commit.nil?
db[:projects].filter(:id => fork[:id]).update(:forked_commit_id => forked_commit[:id])
info "Repo #{owner}/#{repo} was forked at #{parent[:login]}/#{parent[:name]}:#{forked_sha}"
end
db[:commits].where(:sha => forked_sha).first
end | ruby | {
"resource": ""
} |
q26525 | GHTorrent.Mirror.ensure_orgs | test | def ensure_orgs(user)
retrieve_orgs(user).map{|o| save{ensure_participation(user, o['login'])}}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26526 | GHTorrent.Mirror.ensure_participation | test | def ensure_participation(user, organization, members = true)
org = ensure_org(organization, members)
if org.nil?
warn "Could not find organization #{organization}"
return
end
usr = ensure_user(user, false, false)
org_members = db[:organization_members]
participates = org_members.first(:user_id => usr[:id], :org_id => org[:id])
if participates.nil?
org_members.insert(:user_id => usr[:id],
:org_id => org[:id])
info "Added participation #{organization} -> #{user}"
org_members.first(:user_id => usr[:id], :org_id => org[:id])
else
debug "Participation #{organization} -> #{user} exists"
participates
end
end | ruby | {
"resource": ""
} |
q26527 | GHTorrent.Mirror.ensure_org | test | def ensure_org(organization, members = true)
org = db[:users].first(:login => organization, :type => 'org')
if org.nil?
org = ensure_user(organization, false, false)
# Not an organization, don't go ahead
if org[:type] != 'ORG'
warn "User #{organization} is not an organization"
return nil
end
end
if members
retrieve_org_members(organization).map do |x|
ensure_participation(ensure_user(x['login'], false, false)[:login],
organization, false)
end
end
org
end | ruby | {
"resource": ""
} |
q26528 | GHTorrent.Mirror.ensure_commit_comments | test | def ensure_commit_comments(user, repo, sha)
commit_id = db[:commits].first(:sha => sha)[:id]
stored_comments = db[:commit_comments].filter(:commit_id => commit_id)
commit_comments = retrieve_commit_comments(user, repo, sha)
not_saved = commit_comments.reduce([]) do |acc, x|
if stored_comments.find{|y| y[:comment_id] == x['id']}.nil?
acc << x
else
acc
end
end
not_saved.map{|x| save{ensure_commit_comment(user, repo, sha, x['id'])}}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26529 | GHTorrent.Mirror.ensure_watchers | test | def ensure_watchers(owner, repo)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repo #{owner}/#{repo} for retrieving watchers"
return
end
watchers = db.from(:watchers, :users).\
where(Sequel.qualify('watchers', 'user_id') => Sequel.qualify('users', 'id')).\
where(Sequel.qualify('watchers', 'repo_id') => currepo[:id]).select(:login).all
retrieve_watchers(owner, repo).reduce([]) do |acc, x|
if watchers.find { |y|
y[:login] == x['login']
}.nil?
acc << x
else
acc
end
end.map { |x| save{ensure_watcher(owner, repo, x['login']) }}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26530 | GHTorrent.Mirror.ensure_pull_requests | test | def ensure_pull_requests(owner, repo, refresh = false)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repo #{owner}/#{repo} for retrieving pull requests"
return
end
raw_pull_reqs = if refresh
retrieve_pull_requests(owner, repo, refresh = true)
else
pull_reqs = db[:pull_requests].filter(:base_repo_id => currepo[:id]).all
retrieve_pull_requests(owner, repo).reduce([]) do |acc, x|
if pull_reqs.find { |y| y[:pullreq_id] == x['number'] }.nil?
acc << x
else
acc
end
end
end
raw_pull_reqs.map { |x| save { ensure_pull_request(owner, repo, x['number']) } }.select { |x| !x.nil? }
end | ruby | {
"resource": ""
} |
q26531 | GHTorrent.Mirror.ensure_pull_request_history | test | def ensure_pull_request_history(id, ts, act, actor)
user = unless actor.nil?
ensure_user(actor, false, false)
end
pull_req_history = db[:pull_request_history]
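# 'opened' and 'merged' can only occur once per pull request, so they are
# matched on action alone; other events are deduplicated within a +/-3
# second window around the reported timestamp.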
entry = if ['opened', 'merged'].include? act
pull_req_history.first(:pull_request_id => id,
:action => act)
else
pull_req_history.first(:pull_request_id => id,
:created_at => (ts - 3)..(ts + 3),
:action => act)
end
if entry.nil?
pull_req_history.insert(:pull_request_id => id,
:created_at => ts,
:action => act,
:actor_id => unless user.nil? then user[:id] end)
info "Added pullreq_event (#{id}) -> (#{act}) by (#{actor}) timestamp #{ts}"
else
debug "Pull request (#{id}) event (#{act}) by (#{actor}) timestamp #{ts} exists"
if entry[:actor_id].nil? and not user.nil?
pull_req_history.where(:pull_request_id => id,
:created_at => (ts - 3)..(ts + 3),
:action => act)\
.update(:actor_id => user[:id])
info "Updated pull request (#{id}) event (#{act}) timestamp #{ts}, actor -> #{user[:login]}"
end
end
end | ruby | {
"resource": ""
} |
q26532 | GHTorrent.Mirror.pr_is_intra_branch | test | def pr_is_intra_branch(req)
return false unless pr_has_head_repo(req)
if req['head']['repo']['owner']['login'] ==
req['base']['repo']['owner']['login'] and
req['head']['repo']['full_name'] == req['base']['repo']['full_name']
true
else
false
end
end | ruby | {
"resource": ""
} |
q26533 | GHTorrent.Mirror.ensure_forks | test | def ensure_forks(owner, repo)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repo #{owner}/#{repo} for retrieving forks"
return
end
existing_forks = db.from(:projects, :users).\
where(Sequel.qualify('users', 'id') => Sequel.qualify('projects', 'owner_id')). \
where(Sequel.qualify('projects', 'forked_from') => currepo[:id]).\
select(Sequel.qualify('projects', 'name'), :login).all
retrieve_forks(owner, repo).reduce([]) do |acc, x|
if existing_forks.find do |y|
forked_repo_owner = x['url'].split(/\//)[4]
forked_repo_name = x['url'].split(/\//)[5]
y[:login] == forked_repo_owner && y[:name] == forked_repo_name
end.nil?
acc << x
else
acc
end
end.map { |x| save{ensure_fork(owner, repo, x['id']) }}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26534 | GHTorrent.Mirror.ensure_fork | test | def ensure_fork(owner, repo, fork_id)
fork = retrieve_fork(owner, repo, fork_id)
if fork.nil?
warn "Could not retrieve fork #{owner}/#{repo} -> #{fork_id}"
return
end
fork_name = if fork['full_name'].nil? then fork['url'].split(/\//)[4..5].join('/') else fork['full_name'] end
fork_owner = fork_name.split(/\//)[0]
fork_name = fork_name.split(/\//)[1]
r = ensure_repo(fork_owner, fork_name, true)
if r.nil?
warn "Could not add #{fork_owner}/#{fork_name} as fork of #{owner}/#{repo}"
else
info "Added fork #{fork_owner}/#{fork_name} of #{owner}/#{repo}"
end
r
end | ruby | {
"resource": ""
} |
q26535 | GHTorrent.Mirror.ensure_issues | test | def ensure_issues(owner, repo)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repo #{owner}/#{repo} for retrieving issues"
return
end
issues = db[:issues].filter(:repo_id => currepo[:id]).all
raw_issues = retrieve_issues(owner, repo).reduce([]) do |acc, x|
if issues.find { |y| y[:issue_id] == x['number'] }.nil?
acc << x
else
acc
end
end
raw_issues.map { |x| save { ensure_issue(owner, repo, x['number']) } }.select { |x| !x.nil? }
end | ruby | {
"resource": ""
} |
q26536 | GHTorrent.Mirror.ensure_issue | test | def ensure_issue(owner, repo, issue_id, events = true,
comments = true, labels = true)
issues = db[:issues]
repository = ensure_repo(owner, repo)
if repository.nil?
warn "Could not find repo #{owner}/#{repo} for retrieving issue #{issue_id}"
return
end
cur_issue = issues.first(:issue_id => issue_id,
:repo_id => repository[:id])
retrieved = retrieve_issue(owner, repo, issue_id)
if retrieved.nil?
warn "Could not retrieve issue #{owner}/#{repo} -> #{issue_id}"
return
end
# Pull requests and issues share the same issue_id
pull_req = unless retrieved['pull_request'].nil? or
retrieved['pull_request']['patch_url'].nil?
debug "Issue #{owner}/#{repo}->#{issue_id} is a pull request"
ensure_pull_request(owner, repo, issue_id, false, false, false)
end
if cur_issue.nil?
reporter = ensure_user(retrieved['user']['login'], false, false)
assignee = unless retrieved['assignee'].nil?
ensure_user(retrieved['assignee']['login'], false, false)
end
issues.insert(:repo_id => repository[:id],
:assignee_id => unless assignee.nil? then assignee[:id] end,
:reporter_id => reporter[:id],
:issue_id => issue_id,
:pull_request => if pull_req.nil? then false else true end,
:pull_request_id => unless pull_req.nil? then pull_req[:id] end,
:created_at => date(retrieved['created_at']))
info "Added issue #{owner}/#{repo} -> #{issue_id}"
else
debug "Issue #{owner}/#{repo}->#{issue_id} exists"
if cur_issue[:pull_request] == false and not pull_req.nil?
info "Updated issue #{owner}/#{repo}->#{issue_id} as pull request"
issues.filter(:issue_id => issue_id, :repo_id => repository[:id]).update(
:pull_request => true,
:pull_request_id => pull_req[:id])
end
end
ensure_issue_events(owner, repo, issue_id) if events
ensure_issue_comments(owner, repo, issue_id) if comments
ensure_issue_labels(owner, repo, issue_id) if labels
issues.first(:issue_id => issue_id,
:repo_id => repository[:id])
end | ruby | {
"resource": ""
} |
q26537 | GHTorrent.Mirror.ensure_issue_events | test | def ensure_issue_events(owner, repo, issue_id)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repository #{owner}/#{repo} for retrieving events for issue #{issue_id}"
return
end
issue = ensure_issue(owner, repo, issue_id, false, false, false)
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving events"
return
end
retrieve_issue_events(owner, repo, issue_id).reduce([]) do |acc, x|
if db[:issue_events].first(:issue_id => issue[:id],
:event_id => x['id']).nil?
acc << x
else
acc
end
end.map { |x|
save{ensure_issue_event(owner, repo, issue_id, x['id'])}
}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26538 | GHTorrent.Mirror.ensure_issue_event | test | def ensure_issue_event(owner, repo, issue_id, event_id)
issue = ensure_issue(owner, repo, issue_id, false, false, false)
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving event #{event_id}"
return
end
issue_event_str = "#{owner}/#{repo} -> #{issue_id}/#{event_id}"
curevent = db[:issue_events].first(:issue_id => issue[:id],
:event_id => event_id)
if curevent.nil?
retrieved = retrieve_issue_event(owner, repo, issue_id, event_id)
if retrieved.nil?
warn "Could not retrieve issue_event #{owner}/#{repo} -> #{issue_id}/#{issue_event_str}"
return
elsif retrieved['actor'].nil?
warn "Could not find issue_event_actor #{owner}/#{repo} -> #{issue_id}/#{issue_event_str}"
return
end
actor = ensure_user(retrieved['actor']['login'], false, false)
action_specific = case retrieved['event']
when "referenced" then retrieved['commit_id']
when "merged" then retrieved['commit_id']
when "closed" then retrieved['commit_id']
else nil
end
if retrieved['event'] == 'assigned'
def update_assignee(owner, repo, issue, actor)
db[:issues].where(:id => issue[:id]).update(:assignee_id => actor[:id])
info "Updated #{owner}/#{repo} -> #{issue[:id]}, assignee -> #{actor[:id]}"
end
if issue[:assignee_id].nil? then
update_assignee(owner, repo, issue, actor)
else
existing = db[:issue_events].\
filter(:issue_id => issue[:id],:action => 'assigned').\
order(Sequel.desc(:created_at)).first
if existing.nil?
update_assignee(owner, repo, issue, actor)
elsif date(existing[:created_at]) < date(retrieved['created_at'])
update_assignee(owner, repo, issue, actor)
end
end
end
db[:issue_events].insert(
:event_id => event_id,
:issue_id => issue[:id],
:actor_id => unless actor.nil? then actor[:id] end,
:action => retrieved['event'],
:action_specific => action_specific,
:created_at => date(retrieved['created_at']))
info "Added issue_event #{owner}/#{repo} -> #{issue_id}/#{issue_event_str}"
db[:issue_events].first(:issue_id => issue[:id],
:event_id => event_id)
else
debug "Issue event #{owner}/#{repo} -> #{issue_id}/#{issue_event_str} exists"
curevent
end
end | ruby | {
"resource": ""
} |
q26539 | GHTorrent.Mirror.ensure_issue_comments | test | def ensure_issue_comments(owner, repo, issue_id, pull_req_id = nil)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find repository #{owner}/#{repo} for retrieving issue comments for issue #{issue_id}"
return
end
issue = if pull_req_id.nil?
ensure_issue(owner, repo, issue_id, false, false, false)
else
db[:issues].first(:pull_request_id => pull_req_id)
end
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving issue comments"
return
end
retrieve_issue_comments(owner, repo, issue_id).reduce([]) do |acc, x|
if db[:issue_comments].first(:issue_id => issue[:id],
:comment_id => x['id']).nil?
acc << x
else
acc
end
end.map { |x|
save{ensure_issue_comment(owner, repo, issue_id, x['id'], pull_req_id)}
}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26540 | GHTorrent.Mirror.ensure_issue_comment | test | def ensure_issue_comment(owner, repo, issue_id, comment_id, pull_req_id = nil)
issue = if pull_req_id.nil?
ensure_issue(owner, repo, issue_id, false, false, false)
else
db[:issues].first(:pull_request_id => pull_req_id)
end
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving comment #{comment_id}"
return
end
issue_comment_str = "#{owner}/#{repo} -> #{issue_id}/#{comment_id}"
curcomment = db[:issue_comments].first(:issue_id => issue[:id],
:comment_id => comment_id)
if curcomment.nil?
retrieved = retrieve_issue_comment(owner, repo, issue_id, comment_id)
if retrieved.nil?
warn "Could not retrieve issue_comment #{issue_comment_str}"
return
end
user = ensure_user(retrieved['user']['login'], false, false)
db[:issue_comments].insert(
:comment_id => comment_id,
:issue_id => issue[:id],
:user_id => unless user.nil? then user[:id] end,
:created_at => date(retrieved['created_at'])
)
info "Added issue_comment #{issue_comment_str}"
db[:issue_comments].first(:issue_id => issue[:id],
:comment_id => comment_id)
else
debug "Issue comment #{issue_comment_str} exists"
curcomment
end
end | ruby | {
"resource": ""
} |
q26541 | GHTorrent.Mirror.ensure_labels | test | def ensure_labels(owner, repo)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find #{owner}/#{repo} for retrieving issue labels"
return
end
repo_labels = db[:repo_labels].filter(:repo_id => currepo[:id]).all
retrieve_repo_labels(owner, repo).reduce([]) do |acc, x|
if repo_labels.find {|y| y[:name] == x['name']}.nil?
acc << x
else
acc
end
end.map { |x| save { ensure_repo_label(owner, repo, x['name']) } }.select { |x| !x.nil? }
end | ruby | {
"resource": ""
} |
q26542 | GHTorrent.Mirror.ensure_repo_label | test | def ensure_repo_label(owner, repo, name)
currepo = ensure_repo(owner, repo)
if currepo.nil?
warn "Could not find #{owner}/#{repo} for retrieving label #{name}"
return
end
label = db[:repo_labels].first(:repo_id => currepo[:id], :name => name)
if label.nil?
retrieved = retrieve_repo_label(owner, repo, name)
if retrieved.nil?
warn "Could not retrieve repo_label #{owner}/#{repo} -> #{name}"
return
end
db[:repo_labels].insert(
:repo_id => currepo[:id],
:name => name
)
info "Added repo_label #{owner}/#{repo} -> #{name}"
db[:repo_labels].first(:repo_id => currepo[:id], :name => name)
else
label
end
end | ruby | {
"resource": ""
} |
q26543 | GHTorrent.Mirror.ensure_issue_labels | test | def ensure_issue_labels(owner, repo, issue_id)
issue = ensure_issue(owner, repo, issue_id, false, false, false)
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving labels"
return
end
issue_labels = db.from(:issue_labels, :repo_labels)\
.where(Sequel.qualify('issue_labels', 'label_id') => Sequel.qualify('repo_labels', 'id'))\
.where(Sequel.qualify('issue_labels', 'issue_id') => issue[:id])\
.select(Sequel.qualify('repo_labels', 'name')).all
retrieve_issue_labels(owner, repo, issue_id).reduce([]) do |acc, x|
if issue_labels.find {|y| y[:name] == x['name']}.nil?
acc << x
else
acc
end
end.map { |x| save{ensure_issue_label(owner, repo, issue[:issue_id], x['name']) }}.select{|x| !x.nil?}
end | ruby | {
"resource": ""
} |
q26544 | GHTorrent.Mirror.ensure_issue_label | test | def ensure_issue_label(owner, repo, issue_id, name)
issue = ensure_issue(owner, repo, issue_id, false, false, false)
if issue.nil?
warn "Could not find issue #{owner}/#{repo} -> #{issue_id} to assign label #{name}"
return
end
label = ensure_repo_label(owner, repo, name)
if label.nil?
warn "Could not find repo label #{owner}/#{repo} -> #{name}"
return
end
issue_lbl = db[:issue_labels].first(:label_id => label[:id],
:issue_id => issue[:id])
if issue_lbl.nil?
db[:issue_labels].insert(
:label_id => label[:id],
:issue_id => issue[:id],
)
info "Added issue_label #{name} to issue #{owner}/#{repo} -> #{issue_id}"
db[:issue_labels].first(:label_id => label[:id],
:issue_id => issue[:id])
else
debug "Issue label #{name} to issue #{owner}/#{repo} -> #{issue_id} exists"
issue_lbl
end
end | ruby | {
"resource": ""
} |
q26545 | GHTorrent.Mirror.transaction | test | def transaction(&block)
db
persister
result = nil
start_time = Time.now
begin
db.transaction(:rollback => :reraise, :isolation => :repeatable,
:retry_on => @retry_on_error, :num_retries => 3) do
result = yield block
end
total = Time.now.to_ms - start_time.to_ms
debug "Transaction committed (#{total} ms)"
result
rescue StandardError => e
total = Time.now.to_ms - start_time.to_ms
warn "Transaction failed (#{total} ms)"
raise e
ensure
GC.start
end
end | ruby | {
"resource": ""
} |
q26546 | GHTorrent.Mirror.store_commit | test | def store_commit(c, repo, user)
commits = db[:commits]
commit = commits.first(:sha => c['sha'])
if commit.nil?
author = commit_user(c['author'], c['commit']['author'])
commiter = commit_user(c['committer'], c['commit']['committer'])
repository = ensure_repo(user, repo)
if repository.nil?
warn "Could not find repo #{user}/#{repo} for storing commit #{c['sha']}"
end
commits.insert(:sha => c['sha'],
:author_id => author[:id],
:committer_id => commiter[:id],
:project_id => if repository.nil? then nil else repository[:id] end,
:created_at => date(c['commit']['author']['date'])
)
info "Added commit #{user}/#{repo} -> #{c['sha']} "
commits.first(:sha => c['sha'])
else
debug "Commit #{user}/#{repo} -> #{c['sha']} exists"
commit
end
end | ruby | {
"resource": ""
} |
q26547 | GHTorrent.Logging.log | test | def log(level, msg)
case level
when :fatal then
loggerr.fatal(retrieve_caller + msg)
when :error then
loggerr.error(retrieve_caller + msg)
when :warn then
loggerr.warn(retrieve_caller + msg)
when :info then
loggerr.info(retrieve_caller + msg)
when :debug then
loggerr.debug(retrieve_caller + msg)
else
loggerr.debug(retrieve_caller + msg)
end
end | ruby | {
"resource": ""
} |
q26548 | GHTorrent.APIClient.paged_api_request | test | def paged_api_request(url, pages = config(:mirror_history_pages_back),
last = nil)
url = ensure_max_per_page(url)
data = api_request_raw(url)
return [] if data.nil?
unless data.meta['link'].nil?
links = parse_links(data.meta['link'])
last = links['last'] if last.nil?
if pages > 0
pages = pages - 1
if pages == 0
return parse_request_result(data)
end
end
if links['next'].nil?
parse_request_result(data)
else
parse_request_result(data) | paged_api_request(links['next'], pages, last)
end
else
parse_request_result(data)
end
end | ruby | {
"resource": ""
} |
q26549 | GHTorrent.APIClient.last_updated | test | def last_updated(url, etag)
begin
ts = Time.now
response = do_request(url, '', etag)
info "Successful etag request. URL: #{url}, Etag: #{etag}, Remaining: #{@remaining}, Total: #{Time.now.to_ms - ts.to_ms} ms"
rescue OpenURI::HTTPError => e
response = e.io
if response.status.first != '304'
etag_request_error_message(url, e, etag)
raise e
end
end
return Time.parse(response.meta['last-modified']) unless response.meta['last-modified'].nil?
return Time.at(86400)
end | ruby | {
"resource": ""
} |
q26550 | GHTorrent.APIClient.num_pages | test | def num_pages(url)
url = ensure_max_per_page(url)
data = api_request_raw(url)
if data.nil? or data.meta.nil? or data.meta['link'].nil?
return 1
end
links = parse_links(data.meta['link'])
if links.nil? or links['last'].nil?
return 1
end
params = CGI::parse(URI::parse(links['last']).query)
params['page'][0].to_i
end | ruby | {
"resource": ""
} |
q26551 | GHTorrent.APIClient.parse_links | test | def parse_links(links)
links.split(/,/).reduce({}) do |acc, x|
matches = x.strip.match(/<(.*)>; rel=\"(.*)\"/)
acc[matches[2]] = matches[1]
acc
end
end | ruby | {
"resource": ""
} |
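Note: for reference, a GitHub Link header and the hash the reduce produces (URLs hypothetical, shortened):

link = '<https://api.github.com/user/repos?page=2>; rel="next", ' \
       '<https://api.github.com/user/repos?page=5>; rel="last"'
parse_links(link)
#=> {"next"=>"https://api.github.com/user/repos?page=2",
#    "last"=>"https://api.github.com/user/repos?page=5"}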
q26552 | GHTorrent.APIClient.parse_request_result | test | def parse_request_result(result)
if result.nil?
[]
else
json = result.read
if json.nil?
[]
else
r = JSON.parse(json)
# Add the etag to the response only for individual entities
if result.meta['etag'] and r.class != Array
r['etag'] = result.meta['etag']
end
r
end
end
end | ruby | {
"resource": ""
} |
q26553 | GHTorrent.APIClient.api_request_raw | test | def api_request_raw(url, media_type = '')
begin
start_time = Time.now
contents = do_request(url, media_type)
total = Time.now.to_ms - start_time.to_ms
info "Successful request. URL: #{url}, Remaining: #{@remaining}, Total: #{total} ms"
contents
rescue OpenURI::HTTPError => e
@remaining = e.io.meta['x-ratelimit-remaining'].to_i
@reset = e.io.meta['x-ratelimit-reset'].to_i
case e.io.status[0].to_i
# The following indicate valid Github return codes
when 400, # Bad request
403, # Forbidden
404, # Not found
409, # Conflict -- returned on gets of empty repos
422 then # Unprocessable entity
warn request_error_msg(url, e)
return nil
when 401 # Unauthorized
warn request_error_msg(url, e)
warn "Unauthorised request with token: #{@token}"
raise e
when 451 # DMCA takedown
warn request_error_msg(url, e)
warn "Repo was taken down (DMCA)"
return nil
else # Server error or HTTP conditions that Github does not report
warn request_error_msg(url, e)
raise e
end
rescue StandardError => e
warn error_msg(url, e)
raise e
ensure
# The exact limit is only enforced upon the first @reset
# No idea how many requests are available on this key. Sleep if we have run out
if @remaining < @req_limit
to_sleep = @reset - Time.now.to_i + 2
warn "Request limit reached, reset in: #{to_sleep} secs"
t = Thread.new do
slept = 0
while true do
debug "Sleeping for #{to_sleep - slept} seconds"
sleep 1
slept += 1
end
end
sleep([0, to_sleep].max)
t.exit
end
end
end | ruby | {
"resource": ""
} |
q26554 | GHTorrent.APIClient.attach_to | test | def attach_to(ip)
TCPSocket.instance_eval do
(class << self; self; end).instance_eval do
alias_method :original_open, :open
case RUBY_VERSION
when /1.8/, /1.9/
define_method(:open) do |conn_address, conn_port|
original_open(conn_address, conn_port, ip)
end
else
define_method(:open) do |conn_address, conn_port, local_host, local_port|
original_open(conn_address, conn_port, ip, local_port)
end
end
end
end
result = begin
yield
rescue StandardError => e
raise e
ensure
TCPSocket.instance_eval do
(class << self; self; end).instance_eval do
alias_method :open, :original_open
remove_method :original_open
end
end
end
result
end | ruby | {
"resource": ""
} |
q26555 | GHTorrent.Persister.connect | test | def connect(adapter, settings)
driver = ADAPTERS[adapter.intern]
driver.new(settings)
end | ruby | {
"resource": ""
} |
q26556 | GHTorrent.Retriever.retrieve_commit | test | def retrieve_commit(repo, sha, user)
commit = persister.find(:commits, {'sha' => "#{sha}"})
if commit.empty?
url = ghurl "repos/#{user}/#{repo}/commits/#{sha}"
c = api_request(url)
if c.nil? or c.empty?
return
end
# commit patches are big and not always interesting
if config(:commit_handling) == 'trim'
c['files'].each { |file| file.delete('patch') }
end
persister.store(:commits, c)
info "Added commit #{user}/#{repo} -> #{sha}"
c
else
debug "Commit #{user}/#{repo} -> #{sha} exists"
commit.first
end
end | ruby | {
"resource": ""
} |
q26557 | GHTorrent.Retriever.retrieve_commits | test | def retrieve_commits(repo, sha, user, pages = -1)
url = if sha.nil?
ghurl "repos/#{user}/#{repo}/commits"
else
ghurl "repos/#{user}/#{repo}/commits?sha=#{sha}"
end
commits = restricted_page_request(url, pages)
commits.map do |c|
retrieve_commit(repo, c['sha'], user)
end.select{|x| not x.nil?}
end | ruby | {
"resource": ""
} |
q26558 | GHTorrent.Retriever.retrieve_orgs | test | def retrieve_orgs(user)
url = ghurl "users/#{user}/orgs"
orgs = paged_api_request(url)
orgs.map{|o| retrieve_org(o['login'])}
end | ruby | {
"resource": ""
} |
q26559 | GHTorrent.Retriever.retrieve_watchers | test | def retrieve_watchers(user, repo)
repo_bound_items(user, repo, :watchers,
["repos/#{user}/#{repo}/stargazers"],
{'repo' => repo, 'owner' => user},
'login', item = nil, refresh = false, order = :desc)
end | ruby | {
"resource": ""
} |
q26560 | GHTorrent.Retriever.retrieve_watcher | test | def retrieve_watcher(user, repo, watcher)
repo_bound_item(user, repo, watcher, :watchers,
["repos/#{user}/#{repo}/stargazers"],
{'repo' => repo, 'owner' => user},
'login', order = :desc)
end | ruby | {
"resource": ""
} |
q26561 | GHTorrent.Retriever.get_repo_events | test | def get_repo_events(owner, repo)
url = ghurl("repos/#{owner}/#{repo}/events")
r = paged_api_request(url)
r.each do |e|
unless get_event(e['id']).empty?
debug "Repository event #{owner}/#{repo} -> #{e['type']}-#{e['id']} already exists"
else
persister.store(:events, e)
info "Added event for repository #{owner}/#{repo} -> #{e['type']}-#{e['id']}"
end
end
persister.find(:events, {'repo.name' => "#{owner}/#{repo}"})
end | ruby | {
"resource": ""
} |
q26562 | GHTorrent.Retriever.retrieve_master_branch_diff | test | def retrieve_master_branch_diff(owner, repo, branch, parent_owner, parent_repo, parent_branch)
branch = retrieve_default_branch(owner, repo) if branch.nil?
parent_branch = retrieve_default_branch(parent_owner, parent_repo) if parent_branch.nil?
return nil if branch.nil? or parent_branch.nil?
cmp_url = "https://api.github.com/repos/#{parent_owner}/#{parent_repo}/compare/#{parent_branch}...#{owner}:#{branch}"
api_request(cmp_url)
end | ruby | {
"resource": ""
} |
q26563 | GHTorrent.Retriever.retrieve_default_branch | test | def retrieve_default_branch(owner, repo, refresh = false)
retrieved = retrieve_repo(owner, repo, refresh)
return nil if retrieved.nil?
master_branch = 'master'
if retrieved['default_branch'].nil?
# The currently stored repo entry has been created before the
# default_branch field was added to the schema
retrieved = retrieve_repo(owner, repo, true)
return nil if retrieved.nil?
end
master_branch = retrieved['default_branch'] unless retrieved.nil?
master_branch
end | ruby | {
"resource": ""
} |
q26564 | GHTorrent.Command.process_options | test | def process_options
command = self
@options = Trollop::options(command.args) do
command.prepare_options(self)
banner <<-END
Standard options:
END
opt :config, 'config.yaml file location', :short => 'c',
:default => 'config.yaml'
opt :verbose, 'verbose mode', :short => 'v'
opt :addr, 'IP address to use for performing requests', :short => 'a',
:type => String
opt :token, 'GitHub OAuth token',
:type => String, :short => 't'
opt :req_limit, 'Number or requests to leave on any provided account (in reqs/hour)',
:type => Integer, :short => 'l'
opt :uniq, 'Unique name for this command. Will appear in logs.',
:type => String, :short => 'u'
end
end | ruby | {
"resource": ""
} |
q26565 | GHTorrent.Command.validate | test | def validate
if options[:config].nil?
unless (File.exist?("config.yaml"))
Trollop::die "No config file in default location (#{Dir.pwd}). You
need to specify the #{:config} parameter. Read the
documentation on how to create a config.yaml file."
end
else
Trollop::die "Cannot find file #{options[:config]}" \
unless File.exist?(options[:config])
end
unless @options[:user].nil?
if not Process.uid == 0
Trollop::die "Option --user (-u) can only be specified by root"
end
begin
Etc.getpwnam(@options[:user])
rescue ArgumentError
Trollop::die "No such user: #{@options[:user]}"
end
end
end | ruby | {
"resource": ""
} |
q26566 | GHTorrent.Command.queue_client | test | def queue_client(queue, key = queue, ack = :after, block)
stopped = false
while not stopped
begin
conn = Bunny.new(:host => config(:amqp_host),
:port => config(:amqp_port),
:username => config(:amqp_username),
:password => config(:amqp_password))
conn.start
ch = conn.create_channel
debug "Queue setting prefetch to #{config(:amqp_prefetch)}"
ch.prefetch(config(:amqp_prefetch))
debug "Queue connection to #{config(:amqp_host)} succeeded"
x = ch.topic(config(:amqp_exchange), :durable => true,
:auto_delete => false)
q = ch.queue(queue, :durable => true)
q.bind(x, :routing_key => key)
q.subscribe(:block => true,
:manual_ack => true) do |delivery_info, properties, msg|
if ack == :before
ch.acknowledge(delivery_info.delivery_tag)
end
begin
block.call(msg)
ensure
if ack != :before
ch.acknowledge(delivery_info.delivery_tag)
end
end
end
rescue Bunny::TCPConnectionFailed => e
warn "Connection to #{config(:amqp_host)} failed. Retrying in 1 sec"
sleep(1)
rescue Bunny::PossibleAuthenticationFailureError => e
warn "Could not authenticate as #{conn.username}"
rescue Bunny::NotFound, Bunny::AccessRefused, Bunny::PreconditionFailed => e
warn "Channel error: #{e}. Retrying in 1 sec"
sleep(1)
rescue Interrupt => _
stopped = true
rescue StandardError => e
raise e
end
end
ch.close unless ch.nil?
conn.close unless conn.nil?
end | ruby | {
"resource": ""
} |
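Note: a hedged consumer sketch (queue name, routing key, and handler are hypothetical; the amqp_* settings come from config). The handler is passed as a plain lambda because block is a positional parameter, not a Ruby block:

handler = ->(msg) { puts "received: #{msg}" }
queue_client('ghtorrent-commits', 'evt.commit.#', :after, handler)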
q26567 | GHTorrent.Utils.read_value | test | def read_value(from, key)
return from if key.nil? or key == ""
key.split(/\./).reduce({}) do |acc, x|
unless acc.nil?
if acc.empty?
# Initial run
acc = from[x]
else
if acc.has_key?(x)
acc = acc[x]
else
# Some intermediate key does not exist
return nil
end
end
else
# Some intermediate key returned a null value
# This indicates a malformed entry
return nil
end
end
end | ruby | {
"resource": ""
} |
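Note: an illustration of the dotted-path lookup on a hypothetical document:

doc = { 'commit' => { 'author' => { 'login' => 'gousiosg' } } }
read_value(doc, 'commit.author.login')  #=> "gousiosg"
read_value(doc, 'commit.author.email')  #=> nil (key does not exist)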
q26568 | GHTorrent.Geolocator.location_filter | test | def location_filter(location)
return nil if location.nil?
location.\
strip.\
downcase.\
tr('#"<>[]', '').\
gsub(/^[0-9,\/().:]*/, '').\
gsub(/ +/, ' ').\
gsub(/,([a-z]*)/, '\1')
end | ruby | {
"resource": ""
} |
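Note: for example, on a hypothetical free-form location string:

location_filter('San Francisco, CA')  #=> "san francisco ca"
location_filter(nil)                  #=> nil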
q26569 | Karafka.BaseResponder.validate_usage! | test | def validate_usage!
registered_topics = self.class.topics.map do |name, topic|
topic.to_h.merge!(
usage_count: messages_buffer[name]&.count || 0
)
end
used_topics = messages_buffer.map do |name, usage|
topic = self.class.topics[name] || Responders::Topic.new(name, registered: false)
topic.to_h.merge!(usage_count: usage.count)
end
result = Karafka::Schemas::ResponderUsage.call(
registered_topics: registered_topics,
used_topics: used_topics
)
return if result.success?
raise Karafka::Errors::InvalidResponderUsageError, result.errors
end | ruby | {
"resource": ""
} |
q26570 | Karafka.BaseResponder.validate_options! | test | def validate_options!
return true unless self.class.options_schema
messages_buffer.each_value do |messages_set|
messages_set.each do |message_data|
result = self.class.options_schema.call(message_data.last)
next if result.success?
raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors
end
end
end | ruby | {
"resource": ""
} |
q26571 | Karafka.BaseResponder.deliver! | test | def deliver!
messages_buffer.each_value do |data_elements|
data_elements.each do |data, options|
# We map this topic name, so it will match namespaced/etc topic in Kafka
# @note By default will not change topic (if default mapper used)
mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
external_options = options.merge(topic: mapped_topic)
producer(options).call(data, external_options)
end
end
end | ruby | {
"resource": ""
} |
q26572 | Karafka.Process.notice_signal | test | def notice_signal(signal)
Thread.new do
Karafka.monitor.instrument('process.notice_signal', caller: self, signal: signal)
end
end | ruby | {
"resource": ""
} |
q26573 | TensorStream.EmbeddingLookup.embedding_lookup | test | def embedding_lookup(params, ids, partition_strategy: "mod", name: nil, validate_indices: true, max_norm: nil)
_embedding_lookup_and_transform(params, ids, partition_strategy: partition_strategy, name: name, max_norm: max_norm, transform_fn: nil)
end | ruby | {
"resource": ""
} |
q26574 | TensorStream.EmbeddingLookup._embedding_lookup_and_transform | test | def _embedding_lookup_and_transform(params, ids, partition_strategy: "mod", name: nil, max_norm: nil, transform_fn: nil)
raise TensorStream::ValueError, "Need at least one param" if params.nil?
params = [params] unless params.is_a?(Array)
TensorStream.name_scope(name, "embedding_lookup", values: params + [ids]) do |name|
np = params.size
ids = TensorStream.convert_to_tensor(ids, name: "ids")
if (np == 1) && (transform_fn.nil? || (ids.shape.size == 1))
result = nil
TensorStream.colocate_with(params[0]) do
result = _clip(TensorStream.gather(params[0], ids, name: name), ids, max_norm)
result = transform_fn.call(result) if transform_fn
end
return TensorStream.identity(result)
else
flat_ids = TensorStream.reshape(ids, [-1])
original_indices = TensorStream.range(TensorStream.size(flat_ids))
p_assignments = nil
new_ids = nil
if partition_strategy == "mod"
p_assignments = flat_ids % np
new_ids = floor_div(flat_ids, np)
elsif partition_strategy == "div"
raise "not yet supported!"
else
raise TensorStream::ValueError, "Unrecognized partition strategy: " + partition_strategy
end
p_assignments = TensorStream.cast(p_assignments, :int32)
gather_ids = TensorStream.dynamic_partition(new_ids, p_assignments, np)
pindices = TensorStream.dynamic_partition(original_indices, p_assignments, np)
partitioned_result = []
(0...np).each do |p|
pids = gather_ids[p]
result = nil
TensorStream.colocate_with(params[p]) do
result = TensorStream.gather(params[p], pids)
if transform_fn
# If transform_fn is provided, the clip_by_norm precedes
# the transform and hence must be co-located. See below
# for the counterpart if transform_fn is not provided.
result = transform_fn.call(_clip(result, pids, max_norm))
end
end
partitioned_result << result
end
ret = TensorStream.dynamic_stitch(pindices, partitioned_result, name: name)
if transform_fn.nil?
element_shape_s = params[0].shape[1..-1]
params[1..-1].each { |p| element_shape_s = element_shape_s.merge_with(p.shape[1..-1]) }
else
element_shape_s = ret.shape[1..-1]
end
# Compute the dynamic element shape.
element_shape_d = if element_shape_s.fully_defined?
element_shape_s
elsif transform_fn.nil?
# It's important that we compute params[0].shape on the right device
# to avoid data motion.
TensorStream.colocate_with(params[0]) do
params_shape = TensorStream.shape(params[0])
params_shape[1..-1]
end
else
TensorStream.shape(ret)[1..-1]
end
ret = TensorStream.reshape(ret, TensorStream.concat([TensorStream.shape(ids), element_shape_d], 0))
ret = _clip(ret, ids, max_norm) unless transform_fn
ret
end
end
end | ruby | {
"resource": ""
} |
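Note: with the "mod" partition strategy, id i lands in shard i % np at row i / np within that shard. A plain-Ruby sketch of the mapping (np and ids hypothetical):

np  = 3
ids = [0, 1, 2, 3, 4, 5, 6]
ids.map { |i| i % np }  #=> [0, 1, 2, 0, 1, 2, 0]  (shard assignment)
ids.map { |i| i / np }  #=> [0, 0, 0, 1, 1, 1, 2]  (row within shard)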
q26575 | TensorStream.Protobuf.load | test | def load(pbfile)
f = File.new(pbfile, "r")
lines = []
while !f.eof? && (str = f.readline.strip)
lines << str
end
evaluate_lines(lines)
end | ruby | {
"resource": ""
} |
q26576 | TensorStream.Ops.assert_equal | test | def assert_equal(x, y, data: nil, summarize: nil, message: nil, name: nil)
_op(:assert_equal, x, y, data: data, summarize: summarize, message: message, name: name)
end | ruby | {
"resource": ""
} |
q26577 | TensorStream.Ops.gradients | test | def gradients(tensor_ys, wrt_xs, name: "gradients", stop_gradients: nil)
tensor_ys = tensor_ys.op
gs = wrt_xs.map(&:op).collect { |x|
stops = stop_gradients ? stop_gradients.map(&:name).join("_") : ""
gradient_program_name = "grad_#{tensor_ys.name}_#{x.name}_#{stops}".to_sym
tensor_graph = tensor_ys.graph
tensor_program = if tensor_graph.node_added?(gradient_program_name)
tensor_graph.get_node(gradient_program_name)
else
tensor_graph.name_scope("gradient_wrt_#{x.name}") do
derivative_ops = TensorStream::MathGradients.derivative(tensor_ys, x, graph: tensor_graph,
stop_gradients: stop_gradients)
tensor_graph.add_node!(gradient_program_name, derivative_ops)
end
end
tensor_program
}
gs
end | ruby | {
"resource": ""
} |
q26578 | TensorStream.Ops.random_normal | test | def random_normal(shape, dtype: :float32, mean: 0.0, stddev: 1.0, seed: nil, name: nil)
options = {dtype: dtype, mean: mean, stddev: stddev, seed: seed, name: name}
_op(:random_standard_normal, shape, options)
end | ruby | {
"resource": ""
} |
q26579 | TensorStream.Ops.eye | test | def eye(num_rows, num_columns: nil, dtype: :float32, name: nil)
_op(:eye, num_rows, num_columns || num_rows, data_type: dtype, name: name)
end | ruby | {
"resource": ""
} |
q26580 | TensorStream.Ops.glorot_uniform_initializer | test | def glorot_uniform_initializer(seed: nil, dtype: nil)
TensorStream::Initializer.new(-> { _op(:glorot_uniform, seed: seed, data_type: dtype) })
end | ruby | {
"resource": ""
} |
q26581 | TensorStream.Ops.random_uniform_initializer | test | def random_uniform_initializer(minval: 0, maxval: 1, seed: nil, dtype: nil)
TensorStream::Initializer.new(-> { _op(:random_uniform, minval: minval, maxval: maxval, seed: seed, data_type: dtype) })
end | ruby | {
"resource": ""
} |
q26582 | TensorStream.Ops.slice | test | def slice(input, start, size, name: nil)
_op(:slice, input, start, size: size, name: name)
end | ruby | {
"resource": ""
} |
q26583 | TensorStream.Ops.ones | test | def ones(shape, dtype: :float32, name: nil)
_op(:ones, shape, data_type: dtype, name: name)
end | ruby | {
"resource": ""
} |
q26584 | TensorStream.Ops.logical_and | test | def logical_and(input_a, input_b, name: nil)
check_data_types(input_a, input_b)
_op(:logical_and, input_a, input_b, name: name)
end | ruby | {
"resource": ""
} |
q26585 | TensorStream.Ops.reduce_mean | test | def reduce_mean(input_tensor, axis = nil, keepdims: false, name: nil)
reduce(:mean, input_tensor, axis, keepdims: keepdims, name: name)
end | ruby | {
"resource": ""
} |
q26586 | TensorStream.Ops.concat | test | def concat(values, axis, name: "concat")
if values.is_a?(Array)
_op(:concat, axis, *values, name: name)
else
_op(:concat, axis, values, name: name)
end
end | ruby | {
"resource": ""
} |
q26587 | TensorStream.Ops.dynamic_partition | test | def dynamic_partition(data, partitions, num_partitions, name: nil)
result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: name)
num_partitions.times.map do |index|
result[index]
end
end | ruby | {
"resource": ""
} |
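Note: a hedged usage sketch (assuming the default evaluator supports dynamic_partition; data and partitions are hypothetical constants). The wrapper returns a plain Ruby Array with one sub-tensor per partition:

require 'tensor_stream'

ts = TensorStream
data       = ts.constant([10, 20, 30, 40])
partitions = ts.constant([0, 1, 1, 0])
parts = ts.dynamic_partition(data, partitions, 2)
ts.session.run(parts[0])  #=> [10, 40]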
q26588 | TensorStream.Ops.where | test | def where(condition, true_t = nil, false_t = nil, name: nil)
_op(:where, condition, true_t, false_t, name: name)
end | ruby | {
"resource": ""
} |
q26589 | TensorStream.Ops.asin | test | def asin(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:asin, input, name: name)
end | ruby | {
"resource": ""
} |
q26590 | TensorStream.Ops.acos | test | def acos(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:acos, input, name: name)
end | ruby | {
"resource": ""
} |
q26591 | TensorStream.Ops.atan | test | def atan(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:atan, input, name: name)
end | ruby | {
"resource": ""
} |
q26592 | TensorStream.Ops.cast | test | def cast(input, dtype, name: nil)
input = convert_to_tensor(input)
return input if input.data_type == dtype
_op(:cast, input, data_type: dtype, name: name)
end | ruby | {
"resource": ""
} |
q26593 | TensorStream.Ops.print | test | def print(input, data, message: nil, name: nil)
_op(:print, input, data, message: message, name: name)
end | ruby | {
"resource": ""
} |
q26594 | TensorStream.Ops.sec | test | def sec(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:sec, input, name: name)
end | ruby | {
"resource": ""
} |
q26595 | TensorStream.Ops.sqrt | test | def sqrt(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:sqrt, input, name: name)
end | ruby | {
"resource": ""
} |
q26596 | TensorStream.Ops.log | test | def log(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:log, input, name: name)
end | ruby | {
"resource": ""
} |
q26597 | TensorStream.Ops.exp | test | def exp(input, name: nil)
check_allowed_types(input, FLOATING_POINT_TYPES)
_op(:exp, input, name: name)
end | ruby | {
"resource": ""
} |
q26598 | TensorStream.Ops.pad | test | def pad(tensor, paddings, mode: "CONSTANT", name: nil)
_op(:pad, tensor, paddings, mode: mode, name: name)
end | ruby | {
"resource": ""
} |
q26599 | TensorStream.Ops.gather | test | def gather(params, indices, validate_indices: nil,
name: nil,
axis: 0)
_op(:gather, params, indices, validate_indices: validate_indices, name: name, axis: axis)
end | ruby | {
"resource": ""
} |